1
2
3
4 """
5 This file is part of the web2py Web Framework
6 Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
7 License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
8
9 Thanks to
10 * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
11 * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
12 * Denes
13 * Chris Clark
14 * clach05
15 * Denes Lengyel
16 * and many others who have contributed to current and previous versions
17
18 This file contains the DAL support for many relational databases,
19 including:
20 - SQLite & SpatiaLite
21 - MySQL
22 - Postgres
23 - Firebird
24 - Oracle
25 - MS SQL
26 - DB2
27 - Interbase
28 - Ingres
29 - Informix (9+ and SE)
30 - SapDB (experimental)
31 - Cubrid (experimental)
32 - CouchDB (experimental)
33 - MongoDB (in progress)
34 - Google:nosql
35 - Google:sql
36 - Teradata
37 - IMAP (experimental)
38
39 Example of usage:
40
41 >>> # from dal import DAL, Field
42
43 ### create DAL connection (and create DB if it doesn't exist)
44 >>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
45 ... folder=None)
46
47 ### define a table 'person' (create/alter as necessary)
48 >>> person = db.define_table('person',Field('name','string'))
49
50 ### insert a record
51 >>> id = person.insert(name='James')
52
53 ### retrieve it by id
54 >>> james = person(id)
55
56 ### retrieve it by name
57 >>> james = person(name='James')
58
59 ### retrieve it by arbitrary query
60 >>> query = (person.name=='James') & (person.name.startswith('J'))
61 >>> james = db(query).select(person.ALL)[0]
62
63 ### update one record
64 >>> james.update_record(name='Jim')
65 <Row {'id': 1, 'name': 'Jim'}>
66
67 ### update multiple records by query
68 >>> db(person.name.like('J%')).update(name='James')
69 1
70
71 ### delete records by query
72 >>> db(person.name.lower() == 'jim').delete()
73 0
74
75 ### retrieve multiple records (rows)
76 >>> people = db(person).select(orderby=person.name,
77 ... groupby=person.name, limitby=(0,100))
78
79 ### further filter them
80 >>> james = people.find(lambda row: row.name == 'James').first()
81 >>> print james.id, james.name
82 1 James
83
84 ### check aggregates
85 >>> counter = person.id.count()
86 >>> print db(person).select(counter).first()(counter)
87 1
88
89 ### delete one record
90 >>> james.delete_record()
91 1
92
93 ### delete (drop) entire database table
94 >>> person.drop()
95
96 Supported field types:
97 id string text boolean integer double decimal password upload
98 blob time date datetime
99
100 Supported DAL URI strings:
101 'sqlite://test.db'
102 'spatialite://test.db'
103 'sqlite:memory'
104 'spatialite:memory'
105 'jdbc:sqlite://test.db'
106 'mysql://root:none@localhost/test'
107 'postgres://mdipierro:password@localhost/test'
108 'postgres:psycopg2://mdipierro:password@localhost/test'
109 'postgres:pg8000://mdipierro:password@localhost/test'
110 'jdbc:postgres://mdipierro:none@localhost/test'
111 'mssql://web2py:none@A64X2/web2py_test'
112 'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
113 'oracle://username:password@database'
114 'firebird://user:password@server:3050/database'
115 'db2://DSN=dsn;UID=user;PWD=pass'
116 'firebird://username:password@hostname/database'
117 'firebird_embedded://username:password@c://path'
118 'informix://user:password@server:3050/database'
119 'informixu://user:password@server:3050/database' # unicode informix
120 'ingres://database' # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
121 'google:datastore' # for google app engine datastore
122 'google:sql' # for google app engine with sql (mysql compatible)
123 'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
124 'imap://user:password@server:port' # experimental
125 'mongodb://user:password@server:port/database' # experimental
126
127 For more info:
128 help(DAL)
129 help(Field)
130 """
131
132
133
134
135
136 __all__ = ['DAL', 'Field']
137
# Default column lengths (in characters/bytes) applied when a Field of the
# given type is declared without an explicit length.
DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
# Maximum number of timing entries retained (ring-buffer size).
TIMINGSSIZE = 100
# Per-platform shared-library name used to load the SpatiaLite extension.
SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }
# Fallback connection URI used when no URI is supplied.
DEFAULT_URI = 'sqlite://dummy.db'
150
151 import re
152 import sys
153 import locale
154 import os
155 import types
156 import datetime
157 import threading
158 import time
159 import csv
160 import cgi
161 import copy
162 import socket
163 import logging
164 import base64
165 import shutil
166 import marshal
167 import decimal
168 import struct
169 import urllib
170 import hashlib
171 import uuid
172 import glob
173 import traceback
174 import platform
175
# Python 2/3 compatibility shims: pick the right module names and normalize
# the bytes/unicode pair so the rest of the file can use one spelling.
PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    # md5 on Python 3 requires bytes; encode str input as utf8 first
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str
190
# Types accepted wherever a callable default/compute/validator may be given.
CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

# Keyword arguments recognized by define_table(); anything else is rejected.
TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name','fields',
     'common_filter','polymodel','table_class','on_define','actual_name'))

# Keyword arguments recognized by select().
SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable', 'orderby_on_limitby'))

# Short aliases for frequently used builtins/os helpers (also bypass any
# __getattr__/__setattr__ overrides on Row/Table objects).
ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join
208
209
210
211
212 try:
213 from gluon.utils import web2py_uuid
214 except (ImportError, SystemError):
215 import uuid
217
218 try:
219 import portalocker
220 have_portalocker = True
221 except ImportError:
222 have_portalocker = False
223
224 try:
225 from gluon import serializers
226 have_serializers = True
227 except ImportError:
228 have_serializers = False
229 try:
230 import json as simplejson
231 except ImportError:
232 try:
233 import gluon.contrib.simplejson as simplejson
234 except ImportError:
235 simplejson = None
236
# Module-level logger for the DAL.
LOGGER = logging.getLogger("web2py.dal")
# Unique sentinel callable used to detect "argument not passed"
# (a fresh lambda cannot collide with any user-supplied value).
DEFAULT = lambda:0

# Process-wide lock guarding the connection pools; per-thread storage for
# open DAL instances.
GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()
242
243
244
245
# Precompiled regexes used throughout the DAL.
# FIX: all patterns are now raw strings; the previous plain strings relied on
# invalid escape sequences such as '\w' and '\.', which raise
# DeprecationWarning/SyntaxWarning on modern Python. Pattern semantics are
# unchanged.
REGEX_TYPE = re.compile(r'^([\w\_\:]+)')                 # leading field-type token
REGEX_DBNAME = re.compile(r'^(\w+)(\:\w+)*')             # scheme[:driver] of a URI
REGEX_W = re.compile(r'^\w+$')                           # valid identifier
REGEX_TABLE_DOT_FIELD = re.compile(r'^(\w+)\.(\w+)$')    # "table.field"
REGEX_UPLOAD_PATTERN = re.compile(r'(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$')
REGEX_CLEANUP_FN = re.compile(r'[\'"\s;]+')              # chars stripped from filenames
REGEX_UNPACK = re.compile(r'(?<!\|)\|(?!\|)')            # single '|' list separator
REGEX_PYTHON_KEYWORDS = re.compile(r'^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile(r"\s+AS\s+(\S+)")    # alias in a SELECT expression
REGEX_CONST_STRING = re.compile(r'(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile(r'^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile(r'^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile(r'\.(?P<e>\w{1,5})$')   # file extension (1-5 chars)
REGEX_QUOTES = re.compile(r"'[^']*'")
REGEX_ALPHANUMERIC = re.compile(r'^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile(r'\://([^:@]*)\:')           # password portion of a URI
REGEX_NOPASSWD = re.compile(r'\/\/[\w\.\-]+[\:\/](.+)(?=@)')  # credentials to blank out
263
264
265
266 DRIVERS = []
267
268 try:
269 from new import classobj
270 from google.appengine.ext import db as gae
271 from google.appengine.api import namespace_manager, rdbms
272 from google.appengine.api.datastore_types import Key
273 from google.appengine.ext.db.polymodel import PolyModel
274 DRIVERS.append('google')
275 except ImportError:
276 pass
277
278 if not 'google' in DRIVERS:
279
280 try:
281 from pysqlite2 import dbapi2 as sqlite2
282 DRIVERS.append('SQLite(sqlite2)')
283 except ImportError:
284 LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')
285
286 try:
287 from sqlite3 import dbapi2 as sqlite3
288 DRIVERS.append('SQLite(sqlite3)')
289 except ImportError:
290 LOGGER.debug('no SQLite drivers sqlite3')
291
292 try:
293
294 try:
295 import gluon.contrib.pymysql as pymysql
296
297
298 pymysql.ESCAPE_REGEX = re.compile("'")
299 pymysql.ESCAPE_MAP = {"'": "''"}
300
301 except ImportError:
302 import pymysql
303 DRIVERS.append('MySQL(pymysql)')
304 except ImportError:
305 LOGGER.debug('no MySQL driver pymysql')
306
307 try:
308 import MySQLdb
309 DRIVERS.append('MySQL(MySQLdb)')
310 except ImportError:
311 LOGGER.debug('no MySQL driver MySQLDB')
312
313 try:
314 import mysql.connector as mysqlconnector
315 DRIVERS.append("MySQL(mysqlconnector)")
316 except ImportError:
317 LOGGER.debug("no driver mysql.connector")
318
319 try:
320 import psycopg2
321 from psycopg2.extensions import adapt as psycopg2_adapt
322 DRIVERS.append('PostgreSQL(psycopg2)')
323 except ImportError:
324 LOGGER.debug('no PostgreSQL driver psycopg2')
325
326 try:
327
328 try:
329 import gluon.contrib.pg8000.dbapi as pg8000
330 except ImportError:
331 import pg8000.dbapi as pg8000
332 DRIVERS.append('PostgreSQL(pg8000)')
333 except ImportError:
334 LOGGER.debug('no PostgreSQL driver pg8000')
335
336 try:
337 import cx_Oracle
338 DRIVERS.append('Oracle(cx_Oracle)')
339 except ImportError:
340 LOGGER.debug('no Oracle driver cx_Oracle')
341
342 try:
343 try:
344 import pyodbc
345 except ImportError:
346 try:
347 import gluon.contrib.pypyodbc as pyodbc
348 except Exception, e:
349 raise ImportError(str(e))
350 DRIVERS.append('MSSQL(pyodbc)')
351 DRIVERS.append('DB2(pyodbc)')
352 DRIVERS.append('Teradata(pyodbc)')
353 DRIVERS.append('Ingres(pyodbc)')
354 except ImportError:
355 LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')
356
357 try:
358 import Sybase
359 DRIVERS.append('Sybase(Sybase)')
360 except ImportError:
361 LOGGER.debug('no Sybase driver')
362
363 try:
364 import kinterbasdb
365 DRIVERS.append('Interbase(kinterbasdb)')
366 DRIVERS.append('Firebird(kinterbasdb)')
367 except ImportError:
368 LOGGER.debug('no Firebird/Interbase driver kinterbasdb')
369
370 try:
371 import fdb
372 DRIVERS.append('Firebird(fdb)')
373 except ImportError:
374 LOGGER.debug('no Firebird driver fdb')
375
376 try:
377 import firebirdsql
378 DRIVERS.append('Firebird(firebirdsql)')
379 except ImportError:
380 LOGGER.debug('no Firebird driver firebirdsql')
381
382 try:
383 import informixdb
384 DRIVERS.append('Informix(informixdb)')
385 LOGGER.warning('Informix support is experimental')
386 except ImportError:
387 LOGGER.debug('no Informix driver informixdb')
388
389 try:
390 import sapdb
391 DRIVERS.append('SQL(sapdb)')
392 LOGGER.warning('SAPDB support is experimental')
393 except ImportError:
394 LOGGER.debug('no SAP driver sapdb')
395
396 try:
397 import cubriddb
398 DRIVERS.append('Cubrid(cubriddb)')
399 LOGGER.warning('Cubrid support is experimental')
400 except ImportError:
401 LOGGER.debug('no Cubrid driver cubriddb')
402
403 try:
404 from com.ziclix.python.sql import zxJDBC
405 import java.sql
406
407 from org.sqlite import JDBC
408 zxJDBC_sqlite = java.sql.DriverManager
409 DRIVERS.append('PostgreSQL(zxJDBC)')
410 DRIVERS.append('SQLite(zxJDBC)')
411 LOGGER.warning('zxJDBC support is experimental')
412 is_jdbc = True
413 except ImportError:
414 LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
415 is_jdbc = False
416
417 try:
418 import couchdb
419 DRIVERS.append('CouchDB(couchdb)')
420 except ImportError:
421 LOGGER.debug('no Couchdb driver couchdb')
422
423 try:
424 import pymongo
425 DRIVERS.append('MongoDB(pymongo)')
426 except:
427 LOGGER.debug('no MongoDB driver pymongo')
428
429 try:
430 import imaplib
431 DRIVERS.append('IMAP(imaplib)')
432 except:
433 LOGGER.debug('no IMAP driver imaplib')
434
# English pluralization rules, tried in order: (match-test regex,
# substitution regex, replacement). The first tuple whose test matches the
# singular word wins; the final catch-all appends 's'.
PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
456
461
464
467
469
472
474 regex = re.compile('\.keyword(?=\w)')
475 a = regex.sub('."%s"' % keyword,a)
476 return a
477
478 if 'google' in DRIVERS:
479
480 is_jdbc = False
483 """
484 GAE decimal implementation
485 """
486 data_type = decimal.Decimal
487
488 - def __init__(self, precision, scale, **kwargs):
489 super(GAEDecimalProperty, self).__init__(self, **kwargs)
490 d = '1.'
491 for x in range(scale):
492 d += '0'
493 self.round = decimal.Decimal(d)
494
502
504 if value is None or value == '':
505 return None
506 else:
507 return decimal.Decimal(value).quantize(self.round)
508
510 value = super(GAEDecimalProperty, self).validate(value)
511 if value is None or isinstance(value, decimal.Decimal):
512 return value
513 elif isinstance(value, basestring):
514 return decimal.Decimal(value)
515 raise gae.BadValueError("Property %s must be a Decimal or string."\
516 % self.name)
517
523
524 POOLS = {}
525 check_active_connection = True
526
527 @staticmethod
530
531
532
533 - def close(self,action='commit',really=True):
550
551 @staticmethod
553 """ to close cleanly databases in a multithreaded environment """
554 dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
555 for db_uid, db_group in dbs:
556 for db in db_group:
557 if hasattr(db,'_adapter'):
558 db._adapter.close(action)
559 getattr(THREAD_LOCAL,'db_instances',{}).clear()
560 getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
561 if callable(action):
562 action(None)
563 return
564
577
579 """hook for the after_connection parameter"""
580 if callable(self._after_connection):
581 self._after_connection(self)
582 self.after_connection()
583
585 """ this it is supposed to be overloaded by adapters"""
586 pass
587
589 """
590 this function defines: self.connection and self.cursor
591 (iff cursor is True)
592 if self.pool_size>0 it will try pull the connection from the pool
593 if the connection is not active (closed by db server) it will loop
594 if not self.pool_size or no active connections in pool makes a new one
595 """
596 if getattr(self,'connection', None) != None:
597 return
598 if f is None:
599 f = self.connector
600
601
602
603
604
605 if not self.pool_size:
606 self.connection = f()
607 self.cursor = cursor and self.connection.cursor()
608 else:
609 uri = self.uri
610 POOLS = ConnectionPool.POOLS
611 while True:
612 GLOBAL_LOCKER.acquire()
613 if not uri in POOLS:
614 POOLS[uri] = []
615 if POOLS[uri]:
616 self.connection = POOLS[uri].pop()
617 GLOBAL_LOCKER.release()
618 self.cursor = cursor and self.connection.cursor()
619 try:
620 if self.cursor and self.check_active_connection:
621 self.execute('SELECT 1;')
622 break
623 except:
624 pass
625 else:
626 GLOBAL_LOCKER.release()
627 self.connection = f()
628 self.cursor = cursor and self.connection.cursor()
629 break
630 self.after_connection_hook()
631
638 native_json = False
639 driver = None
640 driver_name = None
641 drivers = ()
642 connection = None
643 commit_on_alter_table = False
644 support_distributed_transaction = False
645 uploads_in_blob = False
646 can_select_for_update = True
647 dbpath = None
648 folder = None
649
650 TRUE = 'T'
651 FALSE = 'F'
652 T_SEP = ' '
653 QUOTE_TEMPLATE = '"%s"'
654
655 types = {
656 'boolean': 'CHAR(1)',
657 'string': 'CHAR(%(length)s)',
658 'text': 'TEXT',
659 'json': 'TEXT',
660 'password': 'CHAR(%(length)s)',
661 'blob': 'BLOB',
662 'upload': 'CHAR(%(length)s)',
663 'integer': 'INTEGER',
664 'bigint': 'INTEGER',
665 'float':'DOUBLE',
666 'double': 'DOUBLE',
667 'decimal': 'DOUBLE',
668 'date': 'DATE',
669 'time': 'TIME',
670 'datetime': 'TIMESTAMP',
671 'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
672 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
673 'list:integer': 'TEXT',
674 'list:string': 'TEXT',
675 'list:reference': 'TEXT',
676
677 'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
678 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
679 }
680
682 if not hasattr(self.driver, "OperationalError"):
683 return None
684 return isinstance(exception, self.driver.OperationalError)
685
687 if not hasattr(self.driver, "ProgrammingError"):
688 return None
689 return isinstance(exception, self.driver.ProgrammingError)
690
692 pkeys = getattr(table,'_primarykey',None)
693 if pkeys:
694 return table[pkeys[0]] != None
695 else:
696 return table._id != None
697
699 return "'%s'" % obj.replace("'", "''")
700
702 if isinstance(obj,(int,float)):
703 return str(obj)
704 return self.adapt(str(obj))
705
707 """
708 to be used ONLY for files that on GAE may not be on filesystem
709 """
710 return exists(filename)
711
712 - def file_open(self, filename, mode='rb', lock=True):
713 """
714 to be used ONLY for files that on GAE may not be on filesystem
715 """
716 if have_portalocker and lock:
717 fileobj = portalocker.LockedFile(filename,mode)
718 else:
719 fileobj = open(filename,mode)
720 return fileobj
721
723 """
724 to be used ONLY for files that on GAE may not be on filesystem
725 """
726 if fileobj:
727 fileobj.close()
728
731
733 self.adapter_args = adapter_args
734 if getattr(self,'driver',None) != None:
735 return
736 drivers_available = [driver for driver in self.drivers
737 if driver in globals()]
738 if uri:
739 items = uri.split('://',1)[0].split(':')
740 request_driver = items[1] if len(items)>1 else None
741 else:
742 request_driver = None
743 request_driver = request_driver or adapter_args.get('driver')
744 if request_driver:
745 if request_driver in drivers_available:
746 self.driver_name = request_driver
747 self.driver = globals().get(request_driver)
748 else:
749 raise RuntimeError("driver %s not available" % request_driver)
750 elif drivers_available:
751 self.driver_name = drivers_available[0]
752 self.driver = globals().get(self.driver_name)
753 else:
754 raise RuntimeError("no driver available %s" % str(self.drivers))
755
756 - def log(self, message, table=None):
757 """ Logs migrations
758
759 It will not log changes if logfile is not specified. Defaults
760 to sql.log
761 """
762
763 isabs = None
764 logfilename = self.adapter_args.get('logfile','sql.log')
765 writelog = bool(logfilename)
766 if writelog:
767 isabs = os.path.isabs(logfilename)
768
769 if table and table._dbt and writelog and self.folder:
770 if isabs:
771 table._loggername = logfilename
772 else:
773 table._loggername = pjoin(self.folder, logfilename)
774 logfile = self.file_open(table._loggername, 'a')
775 logfile.write(message)
776 self.file_close(logfile)
777
778
779 - def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
780 credential_decoder=IDENTITY, driver_args={},
781 adapter_args={},do_connect=True, after_connection=None):
782 self.db = db
783 self.dbengine = "None"
784 self.uri = uri
785 self.pool_size = pool_size
786 self.folder = folder
787 self.db_codec = db_codec
788 self._after_connection = after_connection
789 class Dummy(object):
790 lastrowid = 1
791 def __getattr__(self, value):
792 return lambda *a, **b: []
793 self.connection = Dummy()
794 self.cursor = Dummy()
795
797 return '%s_sequence' % tablename
798
800 return '%s_sequence' % tablename
801
804
    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        """Render (and optionally execute) the CREATE TABLE for `table`.

        Each Field is translated into the adapter's SQL dialect, covering
        custom, reference, list:reference, decimal and geometry types.
        Unless ``migrate`` is false the table is created and its schema is
        pickled into a metadata file (``table._dbt``); if that file already
        exists and differs from the fresh schema, ``migrate_table`` is
        invoked to ALTER the live table instead.

        ``migrate`` may be True (auto-named .table file), a string (explicit
        .table filename) or False (render SQL only, touch nothing).
        ``fake_migrate`` records the migration as done without running SQL.
        ``polymodel`` is accepted for GAE signature parity -- unused here.
        Returns the CREATE TABLE SQL string in all cases.
        """
        db = table._db
        fields = []
        # PostGIS geometry columns must be added after the table exists
        postcreation_fields = []
        sql_fields = {}       # schema snapshot used for migration comparison
        sql_fields_aux = {}   # variant with default values, used for CREATE
        TFK = {}              # table-level (composite) foreign keys
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must reference a primary key member or unique field
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table-level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                    constraint_name = constraint_name,
                                    foreign_key = '%s (%s)' % (rtablename,
                                                               rfieldname),
                                    table_name = tablename,
                                    field_name = field_name,
                                    on_delete_action=field.ondelete)
                    else:
                        # resolve the referenced table's id column name
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name  # self reference
                        else:
                            # referenced table not defined yet: guess 'id'
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # geometry type parameters: schema[, srid[, dimension]]
                    dimension = 2  # default dimension when not specified
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # record the rendered column definition
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux differ for default
                # values: sql_fields is used to trigger migrations while
                # sql_fields_aux is used for CREATE TABLE, so that a change
                # of default value alone does not trigger a migration.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # postgres geometry fields are added post-creation, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to the CREATE TABLE statement
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        # append table-level (composite) foreign key clauses
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                table_name = tablename,
                field_name=', '.join(fkeys),
                foreign_table = rtablename,
                foreign_key = ', '.join(pkeys),
                on_delete_action = field.ondelete)

        if getattr(table,'_primarykey',None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        # locate the folder that holds the .table migration metadata files
        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            # NOTE(review): str.decode is Python 2 only -- this branch looks
            # like it would fail on Python 3; confirm supported runtimes
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            # no metadata file yet: this is a brand new table
            if table._dbt:
                self.log('timestamp: %s\n%s\n'
                         % (datetime.datetime.today().isoformat(),
                            query), table)
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # postgres geometry fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log('faked!\n', table)
                else:
                    self.log('success!\n', table)
        else:
            # metadata exists: compare old vs new schema and migrate on change
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, None,
                                   fake_migrate=fake_migrate)
        return query
1031
1032 - def migrate_table(
1033 self,
1034 table,
1035 sql_fields,
1036 sql_fields_old,
1037 sql_fields_aux,
1038 logfile,
1039 fake_migrate=False,
1040 ):
1041
1042
1043 db = table._db
1044 db._migrated.append(table._tablename)
1045 tablename = table._tablename
1046 def fix(item):
1047 k,v=item
1048 if not isinstance(v,dict):
1049 v=dict(type='unknown',sql=v)
1050 return k.lower(),v
1051
1052
1053 sql_fields = dict(map(fix,sql_fields.iteritems()))
1054 sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
1055 sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
1056 if db._debug:
1057 logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))
1058
1059 keys = sql_fields.keys()
1060 for key in sql_fields_old:
1061 if not key in keys:
1062 keys.append(key)
1063 new_add = self.concat_add(tablename)
1064
1065 metadata_change = False
1066 sql_fields_current = copy.copy(sql_fields_old)
1067 for key in keys:
1068 query = None
1069 if not key in sql_fields_old:
1070 sql_fields_current[key] = sql_fields[key]
1071 if self.dbengine in ('postgres',) and \
1072 sql_fields[key]['type'].startswith('geometry'):
1073
1074 query = [ sql_fields[key]['sql'] ]
1075 else:
1076 query = ['ALTER TABLE %s ADD %s %s;' % \
1077 (tablename, key,
1078 sql_fields_aux[key]['sql'].replace(', ', new_add))]
1079 metadata_change = True
1080 elif self.dbengine in ('sqlite', 'spatialite'):
1081 if key in sql_fields:
1082 sql_fields_current[key] = sql_fields[key]
1083 metadata_change = True
1084 elif not key in sql_fields:
1085 del sql_fields_current[key]
1086 ftype = sql_fields_old[key]['type']
1087 if (self.dbengine in ('postgres',) and
1088 ftype.startswith('geometry')):
1089 geotype, parms = ftype[:-1].split('(')
1090 schema = parms.split(',')[0]
1091 query = [ "SELECT DropGeometryColumn ('%(schema)s', "+
1092 "'%(table)s', '%(field)s');" %
1093 dict(schema=schema, table=tablename, field=key,) ]
1094 elif self.dbengine in ('firebird',):
1095 query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
1096 else:
1097 query = ['ALTER TABLE %s DROP COLUMN %s;' %
1098 (tablename, key)]
1099 metadata_change = True
1100 elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
1101 and not (key in table.fields and
1102 isinstance(table[key].type, SQLCustomType)) \
1103 and not sql_fields[key]['type'].startswith('reference')\
1104 and not sql_fields[key]['type'].startswith('double')\
1105 and not sql_fields[key]['type'].startswith('id'):
1106 sql_fields_current[key] = sql_fields[key]
1107 t = tablename
1108 tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
1109 if self.dbengine in ('firebird',):
1110 drop_expr = 'ALTER TABLE %s DROP %s;'
1111 else:
1112 drop_expr = 'ALTER TABLE %s DROP COLUMN %s;'
1113 key_tmp = key + '__tmp'
1114 query = ['ALTER TABLE %s ADD %s %s;' % (t, key_tmp, tt),
1115 'UPDATE %s SET %s=%s;' % (t, key_tmp, key),
1116 drop_expr % (t, key),
1117 'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
1118 'UPDATE %s SET %s=%s;' % (t, key, key_tmp),
1119 drop_expr % (t, key_tmp)]
1120 metadata_change = True
1121 elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
1122 sql_fields_current[key] = sql_fields[key]
1123 metadata_change = True
1124
1125 if query:
1126 self.log('timestamp: %s\n'
1127 % datetime.datetime.today().isoformat(), table)
1128 db['_lastsql'] = '\n'.join(query)
1129 for sub_query in query:
1130 self.log(sub_query + '\n', table)
1131 if fake_migrate:
1132 if db._adapter.commit_on_alter_table:
1133 self.save_dbt(table,sql_fields_current)
1134 self.log('faked!\n', table)
1135 else:
1136 self.execute(sub_query)
1137
1138
1139
1140
1141
1142 if db._adapter.commit_on_alter_table:
1143 db.commit()
1144 self.save_dbt(table,sql_fields_current)
1145 self.log('success!\n', table)
1146
1147 elif metadata_change:
1148 self.save_dbt(table,sql_fields_current)
1149
1150 if metadata_change and not (query and db._adapter.commit_on_alter_table):
1151 db.commit()
1152 self.save_dbt(table,sql_fields_current)
1153 self.log('success!\n', table)
1154
1155 - def save_dbt(self,table, sql_fields_current):
1156 tfile = self.file_open(table._dbt, 'w')
1157 pickle.dump(sql_fields_current, tfile)
1158 self.file_close(tfile)
1159
1160 - def LOWER(self, first):
1162
1163 - def UPPER(self, first):
1165
1166 - def COUNT(self, first, distinct=None):
1167 return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
1168 % self.expand(first)
1169
1171 return "EXTRACT(%s FROM %s)" % (what, self.expand(first))
1172
1173 - def EPOCH(self, first):
1175
1178
1181
1184
1187
1190
1191 - def NOT_NULL(self, default, field_type):
1192 return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)
1193
1195 expressions = [self.expand(first)]+[self.expand(e) for e in second]
1196 return 'COALESCE(%s)' % ','.join(expressions)
1197
1200
1201 - def RAW(self, first):
1203
1206
1208 return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
1209
1211 return 'PRIMARY KEY(%s)' % key
1212
1213 - def _drop(self, table, mode):
1214 return ['DROP TABLE %s;' % table]
1215
1216 - def drop(self, table, mode=''):
1230
1231 - def _insert(self, table, fields):
1238
1240 return 'INSERT INTO %s DEFAULT VALUES;' % table
1241
    def insert(self, table, fields):
        """Insert *fields* (a list of (Field, value) pairs) into *table*.

        Returns a Reference wrapping the new integer row id, a dict of the
        primary-key values for keyed tables, or the backend's raw lastrowid
        when it is not an int. On a database error, defers to the table's
        _on_insert_error hook when one is defined, otherwise re-raises.
        """
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if hasattr(table,'_on_insert_error'):
                return table._on_insert_error(table,fields,e)
            raise e
        if hasattr(table,'_primarykey'):
            # keyed table: no auto-increment id to report, echo the key values
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            # backend-specific non-integer ids are returned unwrapped
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid
1260
1262 return [self.insert(table,item) for item in items]
1263
1264 - def NOT(self, first):
1266
1267 - def AND(self, first, second):
1269
1270 - def OR(self, first, second):
1272
1273 - def BELONGS(self, first, second):
1274 if isinstance(second, str):
1275 return '(%s IN (%s))' % (self.expand(first), second[:-1])
1276 if not second:
1277 return '(1=0)'
1278 items = ','.join(self.expand(item, first.type) for item in second)
1279 return '(%s IN (%s))' % (self.expand(first), items)
1280
1281 - def REGEXP(self, first, second):
1282 "regular expression operator"
1283 raise NotImplementedError
1284
1285 - def LIKE(self, first, second):
1286 "case sensitive like operator"
1287 raise NotImplementedError
1288
1289 - def ILIKE(self, first, second):
1290 "case in-sensitive like operator"
1291 return '(%s LIKE %s)' % (self.expand(first),
1292 self.expand(second, 'string'))
1293
1295 return '(%s LIKE %s)' % (self.expand(first),
1296 self.expand(second+'%', 'string'))
1297
1299 return '(%s LIKE %s)' % (self.expand(first),
1300 self.expand('%'+second, 'string'))
1301
    def CONTAINS(self,first,second,case_sensitive=False):
        """LIKE-based containment test.

        For string/text/json columns the pattern is '%second%'; for list:
        columns it is '%|second|%', matching one |-delimited element.
        Literal '%' in *second* is doubled, and '|' too for list types,
        before the wildcards are added; Expression operands are instead
        wrapped with SQL CONCAT/REPLACE so the escaping happens server-side.
        Dispatches to LIKE or ILIKE depending on *case_sensitive*.
        """
        if first.type in ('string','text', 'json'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT('%',Expression(
                    None,self.REPLACE(second,('%','%%'))),'%'))
            else:
                second = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT(
                    '%|',Expression(None,self.REPLACE(
                        Expression(None,self.REPLACE(
                            second,('%','%%'))),('|','||'))),'|%'))
            else:
                second = '%|'+str(second).replace('%','%%')\
                    .replace('|','||')+'|%'
        # and/or idiom: LIKE when case_sensitive is truthy, else ILIKE
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first,second)
1320
1321 - def EQ(self, first, second=None):
1326
1327 - def NE(self, first, second=None):
1332
1333 - def LT(self,first,second=None):
1334 if second is None:
1335 raise RuntimeError("Cannot compare %s < None" % first)
1336 return '(%s < %s)' % (self.expand(first),
1337 self.expand(second,first.type))
1338
1339 - def LE(self,first,second=None):
1340 if second is None:
1341 raise RuntimeError("Cannot compare %s <= None" % first)
1342 return '(%s <= %s)' % (self.expand(first),
1343 self.expand(second,first.type))
1344
1345 - def GT(self,first,second=None):
1346 if second is None:
1347 raise RuntimeError("Cannot compare %s > None" % first)
1348 return '(%s > %s)' % (self.expand(first),
1349 self.expand(second,first.type))
1350
1351 - def GE(self,first,second=None):
1352 if second is None:
1353 raise RuntimeError("Cannot compare %s >= None" % first)
1354 return '(%s >= %s)' % (self.expand(first),
1355 self.expand(second,first.type))
1356
1358 return ftype in ('integer','boolean','double','bigint') or \
1359 ftype.startswith('decimal')
1360
1361 - def REPLACE(self, first, (second, third)):
1362 return 'REPLACE(%s,%s,%s)' % (self.expand(first,'string'),
1363 self.expand(second,'string'),
1364 self.expand(third,'string'))
1365
1368
1369 - def ADD(self, first, second):
1375
1376 - def SUB(self, first, second):
1379
1380 - def MUL(self, first, second):
1383
1384 - def DIV(self, first, second):
1387
1388 - def MOD(self, first, second):
1391
1392 - def AS(self, first, second):
1394
1395 - def ON(self, first, second):
1399
1402
1403 - def COMMA(self, first, second):
1405
1406 - def CAST(self, first, second):
1407 return 'CAST(%s AS %s)' % (first, second)
1408
    def expand(self, expression, field_type=None):
        """Recursively render a Field/Expression/Query/constant as SQL text.

        *field_type*, when given, drives how a plain Python constant is
        represented (and forces a CAST to text for non-string Fields).
        """
        if isinstance(expression, Field):
            # fully qualified column name
            out = '%s.%s' % (expression.table._tablename, expression.name)
            if field_type == 'string' and not expression.type in (
                'string','text','json','password'):
                out = self.CAST(out, self.types['text'])
            return out
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                # binary operator (e.g. EQ, ADD, LIKE)
                out = op(first, second, **optional_args)
            elif not first is None:
                # unary operator (e.g. NOT, LOWER)
                out = op(first,**optional_args)
            elif isinstance(op, str):
                # raw SQL snippet stored as the op; drop a trailing ';'
                if op.endswith(';'):
                    op=op[:-1]
                out = '(%s)' % op
            else:
                # nullary operator (e.g. RANDOM)
                out = op()
            return out
        elif field_type:
            # plain constant with a known target type
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)
1441
1444
1445 - def alias(self, table, alias):
1446 """
1447 Given a table object, makes a new table object
1448 with alias name.
1449 """
1450 other = copy.copy(table)
1451 other['_ot'] = other._ot or other._tablename
1452 other['ALL'] = SQLALL(other)
1453 other['_tablename'] = alias
1454 for fieldname in other.fields:
1455 other[fieldname] = copy.copy(other[fieldname])
1456 other[fieldname]._tablename = alias
1457 other[fieldname].tablename = alias
1458 other[fieldname].table = other
1459 table._db[alias] = other
1460 return other
1461
1463 tablename = table._tablename
1464 return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]
1465
1467
1468 try:
1469 queries = table._db._adapter._truncate(table, mode)
1470 for query in queries:
1471 self.log(query + '\n', table)
1472 self.execute(query)
1473 table._db.commit()
1474 self.log('success!\n', table)
1475 finally:
1476 pass
1477
1478 - def _update(self, tablename, query, fields):
1479 if query:
1480 if use_common_filters(query):
1481 query = self.common_filter(query, [tablename])
1482 sql_w = ' WHERE ' + self.expand(query)
1483 else:
1484 sql_w = ''
1485 sql_v = ','.join(['%s=%s' % (field.name,
1486 self.expand(value, field.type)) \
1487 for (field, value) in fields])
1488 tablename = "%s" % self.db[tablename]
1489 return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)
1490
1491 - def update(self, tablename, query, fields):
1492 sql = self._update(tablename, query, fields)
1493 try:
1494 self.execute(sql)
1495 except Exception:
1496 e = sys.exc_info()[1]
1497 table = self.db[tablename]
1498 if hasattr(table,'_on_update_error'):
1499 return table._on_update_error(table,query,fields,e)
1500 raise e
1501 try:
1502 return self.cursor.rowcount
1503 except:
1504 return None
1505
1506 - def _delete(self, tablename, query):
1507 if query:
1508 if use_common_filters(query):
1509 query = self.common_filter(query, [tablename])
1510 sql_w = ' WHERE ' + self.expand(query)
1511 else:
1512 sql_w = ''
1513 return 'DELETE FROM %s%s;' % (tablename, sql_w)
1514
    def delete(self, tablename, query):
        """Execute a DELETE for *query* and return the number of rows removed.

        On SQLite/SpatiaLite (no native ON DELETE CASCADE here) the ids to be
        removed are captured first, then referencing rows with
        ondelete=='CASCADE' are deleted recursively afterwards.
        """
        sql = self._delete(tablename, query)
        # capture the doomed ids before deleting (CASCADE emulation below)
        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]
        # perform the actual delete
        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        # emulate ON DELETE CASCADE for sqlite-family engines
        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type=='reference '+table._tablename \
                        and field.ondelete=='CASCADE':
                    db(field.belongs(deleted)).delete()

        return counter
1536
1538 tablenames = self.tables(query)
1539 if len(tablenames)==1:
1540 return tablenames[0]
1541 elif len(tablenames)<1:
1542 raise RuntimeError("No table selected")
1543 else:
1544 raise RuntimeError("Too many tables selected")
1545
1547 db = self.db
1548 new_fields = []
1549 append = new_fields.append
1550 for item in fields:
1551 if isinstance(item,SQLALL):
1552 new_fields += item._table
1553 elif isinstance(item,str):
1554 if REGEX_TABLE_DOT_FIELD.match(item):
1555 tablename,fieldname = item.split('.')
1556 append(db[tablename][fieldname])
1557 else:
1558 append(Expression(db,lambda item=item:item))
1559 else:
1560 append(item)
1561
1562 if not new_fields:
1563 for table in tablenames:
1564 for field in db[table]:
1565 append(field)
1566 return new_fields
1567
    def _select(self, query, fields, attributes):
        """Build (without executing) the SELECT statement for query/fields.

        Handles inner joins, left joins, DISTINCT, GROUP BY/HAVING,
        ORDER BY, LIMIT/OFFSET and FOR UPDATE. Also stores the expanded
        column names in self._colnames for the row parser.
        """
        tables = self.tables
        # reject unknown keyword attributes early
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        args_get = attributes.get
        tablenames = tables(query)
        tablenames_for_common_filters = tablenames
        # add every table referenced by the requested fields
        for field in fields:
            if isinstance(field, basestring) \
                    and REGEX_TABLE_DOT_FIELD.match(field):
                tn,fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)

        if len(tablenames) < 1:
            raise SyntaxError('Set: no tables selected')
        self._colnames = map(self.expand, fields)
        def geoexpand(field):
            # geometry columns are selected as their WKT text form
            if isinstance(field.type,str) and field.type.startswith('geometry'):
                field = field.st_astext()
            return self.expand(field)
        sql_f = ', '.join(map(geoexpand, fields))
        sql_o = ''
        sql_s = ''
        left = args_get('left', False)
        inner_join = args_get('join', False)
        distinct = args_get('distinct', False)
        groupby = args_get('groupby', False)
        orderby = args_get('orderby', False)
        having = args_get('having', False)
        limitby = args_get('limitby', False)
        orderby_on_limitby = args_get('orderby_on_limitby', True)
        for_update = args_get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError('invalid select attribute: for_update')
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            # 'i' prefix = inner-join bookkeeping: split the join argument
            # into bare tables (ijoint) and ON expressions (ijoinon)
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join
                      if not isinstance(t,Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge={}
            [itables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont
             if t in itables_to_merge]
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames
                         if not t in iimportant_tablenames]
        if left:
            # same bookkeeping for LEFT JOIN arguments
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join
                     if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge={}
            [tables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            # left-joined tables must not receive common filters in WHERE
            tablenames_for_common_filters = [t for t in tablenames
                                             if not t in joinont ]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames
                        if not t in important_tablenames ]
        else:
            excluded = tablenames

        if use_common_filters(query):
            query = self.common_filter(query,tablenames_for_common_filters)
        sql_w = ' WHERE ' + self.expand(query) if query else ''

        # assemble the FROM clause for each join combination
        if inner_join and not left:
            sql_t = ', '.join([self.table_alias(t) for t in iexcluded + \
                                   itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
        elif not inner_join and left:
            sql_t = ', '.join([self.table_alias(t) for t in excluded + \
                                   tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                      iimportant_tablenames + \
                                      tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = \
                all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([self.table_alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([self.table_alias(t) for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        else:
            sql_t = ', '.join(self.table_alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        # implicit primary-key ordering keeps LIMIT/OFFSET pagination stable
        if (limitby and not groupby and tablenames and orderby_on_limitby and not orderby):
            sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in (hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey or [self.db[t]._id.name])])

        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql
1701
1702 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
1703 if limitby:
1704 (lmin, lmax) = limitby
1705 sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
1706 return 'SELECT %s %s FROM %s%s%s;' % \
1707 (sql_s, sql_f, sql_t, sql_w, sql_o)
1708
1710 return self.cursor.fetchall()
1711
1713 args_get = attributes.get
1714 cache = args_get('cache',None)
1715 if not cache:
1716 self.execute(sql)
1717 rows = self._fetchall()
1718 else:
1719 (cache_model, time_expire) = cache
1720 key = self.uri + '/' + sql + '/rows'
1721 if len(key)>200: key = hashlib_md5(key).hexdigest()
1722 def _select_aux2():
1723 self.execute(sql)
1724 return self._fetchall()
1725 rows = cache_model(key,_select_aux2,time_expire)
1726 if isinstance(rows,tuple):
1727 rows = list(rows)
1728 limitby = args_get('limitby', None) or (0,)
1729 rows = self.rowslice(rows,limitby[0],None)
1730 processor = args_get('processor',self.parse)
1731 cacheable = args_get('cacheable',False)
1732 return processor(rows,fields,self._colnames,cacheable=cacheable)
1733
1734 - def select(self, query, fields, attributes):
1735 """
1736 Always returns a Rows object, possibly empty.
1737 """
1738 sql = self._select(query, fields, attributes)
1739 cache = attributes.get('cache', None)
1740 if cache and attributes.get('cacheable',False):
1741 del attributes['cache']
1742 (cache_model, time_expire) = cache
1743 key = self.uri + '/' + sql
1744 if len(key)>200: key = hashlib_md5(key).hexdigest()
1745 args = (sql,fields,attributes)
1746 return cache_model(
1747 key,
1748 lambda self=self,args=args:self._select_aux(*args),
1749 time_expire)
1750 else:
1751 return self._select_aux(sql,fields,attributes)
1752
1753 - def _count(self, query, distinct=None):
1754 tablenames = self.tables(query)
1755 if query:
1756 if use_common_filters(query):
1757 query = self.common_filter(query, tablenames)
1758 sql_w = ' WHERE ' + self.expand(query)
1759 else:
1760 sql_w = ''
1761 sql_t = ','.join(self.table_alias(t) for t in tablenames)
1762 if distinct:
1763 if isinstance(distinct,(list, tuple)):
1764 distinct = xorify(distinct)
1765 sql_d = self.expand(distinct)
1766 return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
1767 (sql_d, sql_t, sql_w)
1768 return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)
1769
1770 - def count(self, query, distinct=None):
1771 self.execute(self._count(query, distinct))
1772 return self.cursor.fetchone()[0]
1773
1785
1789
1793
1799
1802
1805
1808
1811
1814
1816 return '%s_%s__constraint' % (table,fieldname)
1817
1820
1822 if not self.connection: return None
1823 command = a[0]
1824 if hasattr(self,'filter_sql_command'):
1825 command = self.filter_sql_command(command)
1826 if self.db._debug:
1827 LOGGER.debug('SQL: %s' % command)
1828 self.db._lastsql = command
1829 t0 = time.time()
1830 ret = self.cursor.execute(command, *a[1:], **b)
1831 self.db._timings.append((command,time.time()-t0))
1832 del self.db._timings[:-TIMINGSSIZE]
1833 return ret
1834
1837
1839 field_is_type = fieldtype.startswith
1840 if isinstance(obj, CALLABLETYPES):
1841 obj = obj()
1842 if isinstance(fieldtype, SQLCustomType):
1843 value = fieldtype.encoder(obj)
1844 if fieldtype.type in ('string','text', 'json'):
1845 return self.adapt(value)
1846 return value
1847 if isinstance(obj, (Expression, Field)):
1848 return str(obj)
1849 if field_is_type('list:'):
1850 if not obj:
1851 obj = []
1852 elif not isinstance(obj, (list, tuple)):
1853 obj = [obj]
1854 if field_is_type('list:string'):
1855 obj = map(str,obj)
1856 else:
1857 obj = map(int,[o for o in obj if o != ''])
1858
1859 if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
1860 obj = bar_encode(obj)
1861 if obj is None:
1862 return 'NULL'
1863 if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
1864 return 'NULL'
1865 r = self.represent_exceptions(obj, fieldtype)
1866 if not r is None:
1867 return r
1868 if fieldtype == 'boolean':
1869 if obj and not str(obj)[:1].upper() in '0F':
1870 return self.smart_adapt(self.TRUE)
1871 else:
1872 return self.smart_adapt(self.FALSE)
1873 if fieldtype == 'id' or fieldtype == 'integer':
1874 return str(long(obj))
1875 if field_is_type('decimal'):
1876 return str(obj)
1877 elif field_is_type('reference'):
1878 if fieldtype.find('.')>0:
1879 return repr(obj)
1880 elif isinstance(obj, (Row, Reference)):
1881 return str(obj['id'])
1882 return str(long(obj))
1883 elif fieldtype == 'double':
1884 return repr(float(obj))
1885 if isinstance(obj, unicode):
1886 obj = obj.encode(self.db_codec)
1887 if fieldtype == 'blob':
1888 obj = base64.b64encode(str(obj))
1889 elif fieldtype == 'date':
1890 if isinstance(obj, (datetime.date, datetime.datetime)):
1891 obj = obj.isoformat()[:10]
1892 else:
1893 obj = str(obj)
1894 elif fieldtype == 'datetime':
1895 if isinstance(obj, datetime.datetime):
1896 obj = obj.isoformat(self.T_SEP)[:19]
1897 elif isinstance(obj, datetime.date):
1898 obj = obj.isoformat()[:10]+self.T_SEP+'00:00:00'
1899 else:
1900 obj = str(obj)
1901 elif fieldtype == 'time':
1902 if isinstance(obj, datetime.time):
1903 obj = obj.isoformat()[:10]
1904 else:
1905 obj = str(obj)
1906 elif fieldtype == 'json':
1907 if not self.native_json:
1908 if have_serializers:
1909 obj = serializers.json(obj)
1910 elif simplejson:
1911 obj = simplejson.dumps(obj)
1912 else:
1913 raise RuntimeError("missing simplejson")
1914 if not isinstance(obj,bytes):
1915 obj = bytes(obj)
1916 try:
1917 obj.decode(self.db_codec)
1918 except:
1919 obj = obj.decode('latin1').encode(self.db_codec)
1920 return self.adapt(obj)
1921
1924
1927
1928 - def rowslice(self, rows, minimum=0, maximum=None):
1929 """
1930 By default this function does nothing;
1931 overload when db does not do slicing.
1932 """
1933 return rows
1934
    def parse_value(self, value, field_type, blob_decode=True):
        """Convert a raw driver value into the Python value for *field_type*.

        Text is decoded with the connection codec, then re-encoded as utf-8
        (Python 2 str/unicode semantics). SQLCustomType decoders run first;
        plain string-ish types pass through unchanged; other types dispatch
        through self.parsemap keyed on the base type name.
        """
        if field_type != 'blob' and isinstance(value, str):
            try:
                value = value.decode(self.db._db_codec)
            except Exception:
                # best-effort: leave undecodable bytes untouched
                pass
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        if isinstance(field_type, SQLCustomType):
            value = field_type.decoder(value)
        if not isinstance(field_type, str) or value is None:
            return value
        elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
            return value
        elif field_type.startswith('geo'):
            return value
        elif field_type == 'blob' and not blob_decode:
            return value
        else:
            # e.g. 'decimal(10,2)' -> parsemap key 'decimal' (see REGEX_TYPE)
            key = REGEX_TYPE.match(field_type).group(0)
            return self.parsemap[key](value,field_type)
1956
1958 referee = field_type[10:].strip()
1959 if not '.' in referee:
1960 value = Reference(value)
1961 value._table, value._record = self.db[referee], None
1962 return value
1963
1965 return value == self.TRUE or str(value)[:1].lower() == 't'
1966
1968 if isinstance(value, datetime.datetime):
1969 return value.date()
1970 if not isinstance(value, (datetime.date,datetime.datetime)):
1971 (y, m, d) = map(int, str(value)[:10].strip().split('-'))
1972 value = datetime.date(y, m, d)
1973 return value
1974
1976 if not isinstance(value, datetime.time):
1977 time_items = map(int,str(value)[:8].strip().split(':')[:3])
1978 if len(time_items) == 3:
1979 (h, mi, s) = time_items
1980 else:
1981 (h, mi, s) = time_items + [0]
1982 value = datetime.time(h, mi, s)
1983 return value
1984
1986 if not isinstance(value, datetime.datetime):
1987 value = str(value)
1988 date_part,time_part,timezone = value[:10],value[11:19],value[19:]
1989 if '+' in timezone:
1990 ms,tz = timezone.split('+')
1991 h,m = tz.split(':')
1992 dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
1993 elif '-' in timezone:
1994 ms,tz = timezone.split('-')
1995 h,m = tz.split(':')
1996 dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
1997 else:
1998 dt = None
1999 (y, m, d) = map(int,date_part.split('-'))
2000 time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
2001 while len(time_parts)<3: time_parts.append(0)
2002 time_items = map(int,time_parts)
2003 (h, mi, s) = time_items
2004 value = datetime.datetime(y, m, d, h, mi, s)
2005 if dt:
2006 value = value + dt
2007 return value
2008
2010 return base64.b64decode(str(value))
2011
2013 decimals = int(field_type[8:-1].split(',')[-1])
2014 if self.dbengine in ('sqlite', 'spatialite'):
2015 value = ('%.' + str(decimals) + 'f') % value
2016 if not isinstance(value, decimal.Decimal):
2017 value = decimal.Decimal(str(value))
2018 return value
2019
2024
2029
2034
2035 - def parse_id(self, value, field_type):
2037
2040
2043
2045 if not self.native_json:
2046 if not isinstance(value, basestring):
2047 raise RuntimeError('json data not a string')
2048 if isinstance(value, unicode):
2049 value = value.encode('utf-8')
2050 if have_serializers:
2051 value = serializers.loads_json(value)
2052 elif simplejson:
2053 value = simplejson.loads(value)
2054 else:
2055 raise RuntimeError("missing simplejson")
2056 return value
2057
2059 self.parsemap = {
2060 'id':self.parse_id,
2061 'integer':self.parse_integer,
2062 'bigint':self.parse_integer,
2063 'float':self.parse_double,
2064 'double':self.parse_double,
2065 'reference':self.parse_reference,
2066 'boolean':self.parse_boolean,
2067 'date':self.parse_date,
2068 'time':self.parse_time,
2069 'datetime':self.parse_datetime,
2070 'blob':self.parse_blob,
2071 'decimal':self.parse_decimal,
2072 'json':self.parse_json,
2073 'list:integer':self.parse_list_integers,
2074 'list:reference':self.parse_list_references,
2075 'list:string':self.parse_list_strings,
2076 }
2077
    def parse(self, rows, fields, colnames, blob_decode=True,
              cacheable = False):
        """Turn raw cursor rows into a Rows object of nested Row records.

        Columns named 'table.field' are parsed per field type and grouped by
        table; anything else lands under row['_extra']. Unless *cacheable*,
        each record also gets update_record/delete_record helpers and lazy
        back-reference sets. Virtual/lazy fields are applied at the end.
        """
        db = self.db
        virtualtables = []
        new_rows = []
        # precompute per-column metadata once instead of per row
        tmps = []
        for colname in colnames:
            if not REGEX_TABLE_DOT_FIELD.match(colname):
                tmps.append(None)
            else:
                (tablename, _the_sep_, fieldname) = colname.partition('.')
                table = db[tablename]
                field = table[fieldname]
                ft = field.type
                tmps.append((tablename,fieldname,table,field,ft))
        for (i,row) in enumerate(rows):
            new_row = Row()
            for (j,colname) in enumerate(colnames):
                value = row[j]
                tmp = tmps[j]
                if tmp:
                    (tablename,fieldname,table,field,ft) = tmp
                    if tablename in new_row:
                        colset = new_row[tablename]
                    else:
                        colset = new_row[tablename] = Row()
                        if tablename not in virtualtables:
                            virtualtables.append(tablename)
                    value = self.parse_value(value,ft,blob_decode)
                    if field.filter_out:
                        value = field.filter_out(value)
                    colset[fieldname] = value

                    # backward compatibility: expose a renamed id field as 'id'
                    if ft=='id' and fieldname!='id' and \
                            not 'id' in table.fields:
                        colset['id'] = value

                    if ft == 'id' and not cacheable:
                        # attach record helpers keyed on the row id;
                        # GoogleDatastoreAdapter keys are objects, so the
                        # numeric/name id is extracted first
                        if isinstance(self, GoogleDatastoreAdapter):
                            id = value.key().id_or_name()
                            colset[fieldname] = id
                            colset.gae_item = value
                        else:
                            id = value
                        colset.update_record = RecordUpdater(colset,table,id)
                        colset.delete_record = RecordDeleter(table,id)
                        if table._db._lazy_tables:
                            colset['__get_lazy_reference__'] = LazyReferenceGetter(table, id)
                        for rfield in table._referenced_by:
                            referee_link = db._referee_name and \
                                db._referee_name % dict(
                                table=rfield.tablename,field=rfield.name)
                            if referee_link and not referee_link in colset:
                                colset[referee_link] = LazySet(rfield,id)
                else:
                    # not a table.field column: stash under _extra and also
                    # expose any 'AS name' alias as a row attribute
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colname] = \
                        self.parse_value(value,
                                         fields[j].type,blob_decode)
                    new_column_name = \
                        REGEX_SELECT_AS_PARSER.search(colname)
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row,column_name[0],value)
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)

        # apply new-style virtual and lazy (method) fields per table
        for tablename in virtualtables:
            table = db[tablename]
            fields_virtual = [(f,v) for (f,v) in table.iteritems()
                              if isinstance(v,FieldVirtual)]
            fields_lazy = [(f,v) for (f,v) in table.iteritems()
                           if isinstance(v,FieldMethod)]
            if fields_virtual or fields_lazy:
                for row in rowsobj.records:
                    box = row[tablename]
                    for f,v in fields_virtual:
                        try:
                            box[f] = v.f(row)
                        except AttributeError:
                            pass  # not enough fields selected to compute it
                    for f,v in fields_lazy:
                        try:
                            box[f] = (v.handler or VirtualCommand)(v.f,row)
                        except AttributeError:
                            pass  # not enough fields selected to compute it

            # old-style virtualfields objects attached to the table
            for item in table.virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename:item})
                except (KeyError, AttributeError):
                    # tolerated: partial selects may miss required fields
                    pass
        return rowsobj
2179
2181 tenant_fieldname = self.db._request_tenant
2182
2183 for tablename in tablenames:
2184 table = self.db[tablename]
2185
2186
2187 if table._common_filter != None:
2188 query = query & table._common_filter(query)
2189
2190
2191 if tenant_fieldname in table:
2192 default = table[tenant_fieldname].default
2193 if not default is None:
2194 newquery = table[tenant_fieldname] == default
2195 if query is None:
2196 query = newquery
2197 else:
2198 query = query & newquery
2199 return query
2200
2201 - def CASE(self,query,t,f):
2202 def represent(x):
2203 types = {type(True):'boolean',type(0):'integer',type(1.0):'double'}
2204 if x is None: return 'NULL'
2205 elif isinstance(x,Expression): return str(x)
2206 else: return self.represent(x,types.get(type(x),'string'))
2207 return Expression(self.db,'CASE WHEN %s THEN %s ELSE %s END' % \
2208 (self.expand(query),represent(t),represent(f)))
2209
2215 drivers = ('sqlite2','sqlite3')
2216
2217 can_select_for_update = None
2218
2220 return "web2py_extract('%s',%s)" % (what, self.expand(field))
2221
2222 @staticmethod
2224 table = {
2225 'year': (0, 4),
2226 'month': (5, 7),
2227 'day': (8, 10),
2228 'hour': (11, 13),
2229 'minute': (14, 16),
2230 'second': (17, 19),
2231 }
2232 try:
2233 if lookup != 'epoch':
2234 (i, j) = table[lookup]
2235 return int(s[i:j])
2236 else:
2237 return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
2238 except:
2239 return None
2240
2241 @staticmethod
2243 return re.compile(expression).search(item) is not None
2244
2245 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2246 credential_decoder=IDENTITY, driver_args={},
2247 adapter_args={}, do_connect=True, after_connection=None):
2248 self.db = db
2249 self.dbengine = "sqlite"
2250 self.uri = uri
2251 if do_connect: self.find_driver(adapter_args)
2252 self.pool_size = 0
2253 self.folder = folder
2254 self.db_codec = db_codec
2255 self._after_connection = after_connection
2256 self.find_or_make_work_folder()
2257 path_encoding = sys.getfilesystemencoding() \
2258 or locale.getdefaultlocale()[1] or 'utf8'
2259 if uri.startswith('sqlite:memory'):
2260 self.dbpath = ':memory:'
2261 else:
2262 self.dbpath = uri.split('://',1)[1]
2263 if self.dbpath[0] != '/':
2264 if PYTHON_VERSION == 2:
2265 self.dbpath = pjoin(
2266 self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
2267 else:
2268 self.dbpath = pjoin(self.folder, self.dbpath)
2269 if not 'check_same_thread' in driver_args:
2270 driver_args['check_same_thread'] = False
2271 if not 'detect_types' in driver_args and do_connect:
2272 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2273 def connector(dbpath=self.dbpath, driver_args=driver_args):
2274 return self.driver.Connection(dbpath, **driver_args)
2275 self.connector = connector
2276 if do_connect: self.reconnect()
2277
2283
2285 tablename = table._tablename
2286 return ['DELETE FROM %s;' % tablename,
2287 "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
2288
2291
2292 - def REGEXP(self,first,second):
2293 return '(%s REGEXP %s)' % (self.expand(first),
2294 self.expand(second,'string'))
2295
2296 - def select(self, query, fields, attributes):
2297 """
2298 Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
2299 Note that the entire database, rather than one record, is locked
2300 (it will be locked eventually anyway by the following UPDATE).
2301 """
2302 if attributes.get('for_update', False) and not 'cache' in attributes:
2303 self.execute('BEGIN IMMEDIATE TRANSACTION;')
2304 return super(SQLiteAdapter, self).select(query, fields, attributes)
2305
2307 drivers = ('sqlite3','sqlite2')
2308
2309 types = copy.copy(BaseAdapter.types)
2310 types.update(geometry='GEOMETRY')
2311
2312 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2313 credential_decoder=IDENTITY, driver_args={},
2314 adapter_args={}, do_connect=True, srid=4326, after_connection=None):
2315 self.db = db
2316 self.dbengine = "spatialite"
2317 self.uri = uri
2318 if do_connect: self.find_driver(adapter_args)
2319 self.pool_size = 0
2320 self.folder = folder
2321 self.db_codec = db_codec
2322 self._after_connection = after_connection
2323 self.find_or_make_work_folder()
2324 self.srid = srid
2325 path_encoding = sys.getfilesystemencoding() \
2326 or locale.getdefaultlocale()[1] or 'utf8'
2327 if uri.startswith('spatialite:memory'):
2328 self.dbpath = ':memory:'
2329 else:
2330 self.dbpath = uri.split('://',1)[1]
2331 if self.dbpath[0] != '/':
2332 self.dbpath = pjoin(
2333 self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
2334 if not 'check_same_thread' in driver_args:
2335 driver_args['check_same_thread'] = False
2336 if not 'detect_types' in driver_args and do_connect:
2337 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2338 def connector(dbpath=self.dbpath, driver_args=driver_args):
2339 return self.driver.Connection(dbpath, **driver_args)
2340 self.connector = connector
2341 if do_connect: self.reconnect()
2342
2355
2356
2357
2359 return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
2360 second['precision'], second['options'])
2361
2362 - def ST_ASTEXT(self, first):
2363 return 'AsText(%s)' %(self.expand(first))
2364
2368
2372
2376
2380
2384
2386 return 'Simplify(%s,%s)' %(self.expand(first),
2387 self.expand(second, 'double'))
2388
2392
2396
2398 field_is_type = fieldtype.startswith
2399 if field_is_type('geo'):
2400 srid = 4326
2401 geotype, parms = fieldtype[:-1].split('(')
2402 parms = parms.split(',')
2403 if len(parms) >= 2:
2404 schema, srid = parms[:2]
2405
2406 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
2407
2408
2409
2410
2411 return value
2412 return BaseAdapter.represent(self, obj, fieldtype)
2413
2416 drivers = ('zxJDBC_sqlite',)
2417
2418 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
2419 credential_decoder=IDENTITY, driver_args={},
2420 adapter_args={}, do_connect=True, after_connection=None):
2443 self.connector = connector
2444 if do_connect: self.reconnect()
2445
2450
2453
    # Driver candidates tried in order by find_driver().
    drivers = ('MySQLdb','pymysql', 'mysqlconnector')

    commit_on_alter_table = True
    support_distributed_transaction = True
    # web2py field type -> MySQL column DDL fragment
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'json': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
        'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
    }

    # MySQL quotes identifiers with backticks.
    QUOTE_TEMPLATE = "`%s`"
2486
2489
2492
2494 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
2495 parameters[0], parameters[1])
2496
2497 - def EPOCH(self, first):
2499
2501 return 'CONCAT(%s)' % ','.join(self.expand(x,'string') for x in items)
2502
2503 - def REGEXP(self,first,second):
2504 return '(%s REGEXP %s)' % (self.expand(first),
2505 self.expand(second,'string'))
2506
2507 - def _drop(self,table,mode):
2508
2509 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,
2510 'SET FOREIGN_KEY_CHECKS=1;']
2511
2513 return 'INSERT INTO %s VALUES (DEFAULT);' % table
2514
2517
2521
2524
2527
2528 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
2529
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Parse a ``mysql://user:password@host:port/db`` URI and build
        the connection factory.  Raises SyntaxError on a malformed URI or
        missing user/host/database parts.
        """
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        # MySQLdb-style keyword arguments for the driver
        driver_args.update(db=db,
                           user=credential_decoder(user),
                           passwd=credential_decoder(password),
                           host=host,
                           port=port,
                           charset=charset)


        def connector(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
2573
2575 self.execute('SET FOREIGN_KEY_CHECKS=1;')
2576 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2577
2579 self.execute('select last_insert_id();')
2580 return int(self.cursor.fetchone()[0])
2581
2582
class PostgreSQLAdapter(BaseAdapter):
    """DAL adapter for PostgreSQL (psycopg2 or pg8000 drivers)."""
    drivers = ('psycopg2','pg8000')

    support_distributed_transaction = True
    # web2py field type -> PostgreSQL column DDL fragment
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',

    }

    # identifiers are left unquoted by default; varquote() quotes when needed
    QUOTE_TEMPLATE = '%s'
2618
    def varquote(self,name):
        # Double-quote the identifier only when required (mixed case,
        # reserved word, special characters) — delegated to varquote_aux.
        return varquote_aux(name,'"%s"')
2621
2622 - def adapt(self,obj):
2623 if self.driver_name == 'psycopg2':
2624 return psycopg2_adapt(obj).getquoted()
2625 elif self.driver_name == 'pg8000':
2626 return "'%s'" % str(obj).replace("%","%%").replace("'","''")
2627 else:
2628 return "'%s'" % str(obj).replace("'","''")
2629
2630 - def sequence_name(self,table):
2631 return '%s_id_Seq' % table
2632
2635
2636 - def ADD(self, first, second):
2637 t = first.type
2638 if t in ('text','string','password', 'json', 'upload','blob'):
2639 return '(%s || %s)' % (self.expand(first), self.expand(second, t))
2640 else:
2641 return '(%s + %s)' % (self.expand(first), self.expand(second, t))
2642
2645
2646 - def prepare(self,key):
2647 self.execute("PREPARE TRANSACTION '%s';" % key)
2648
2649 - def commit_prepared(self,key):
2650 self.execute("COMMIT PREPARED '%s';" % key)
2651
2652 - def rollback_prepared(self,key):
2653 self.execute("ROLLBACK PREPARED '%s';" % key)
2654
    def create_sequence_and_triggers(self, query, table, **args):
        # PostgreSQL SERIAL/BIGSERIAL id columns create their backing
        # sequence implicitly, so only the CREATE TABLE statement runs here
        # (other adapters must create sequences/triggers explicitly).



        self.execute(query)
2661
2662 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
2663
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Parse a ``postgres://user:password@host:port/db`` URI into a
        libpq-style DSN string and build the connection factory.

        Raises SyntaxError on a malformed URI or missing user/host/database.
        """
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        # libpq connection string; sslmode is appended only when requested
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                   % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                   % (db, user, host, port, password)

        if self.driver:
            self.__version__ = "%s %s" % (self.driver.__name__,
                                          self.driver.__version__)
        else:
            self.__version__ = None
        def connector(msg=msg,driver_args=driver_args):
            return self.driver.connect(msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
2714
    def after_connection(self):
        # Force UTF-8 and standard-conforming string literals on every new
        # session, then probe the server for native JSON support.
        self.connection.set_client_encoding('UTF8')
        self.execute("SET standard_conforming_strings=on;")
        self.try_json()
2719
    def lastrowid(self,table):
        # currval() returns the id most recently generated for this table's
        # sequence in the current session.
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])
2723
2724 - def try_json(self):
2725
2726
2727 if self.driver_name == "pg8000":
2728 supports_json = self.connection.server_version >= "9.2.0"
2729 elif (self.driver_name == "psycopg2") and \
2730 (self.driver.__version__ >= "2.0.12"):
2731 supports_json = self.connection.server_version >= 90200
2732 elif self.driver_name == "zxJDBC":
2733 supports_json = self.connection.dbversion >= "9.2.0"
2734 else: supports_json = None
2735 if supports_json:
2736 self.types["json"] = "JSON"
2737 self.native_json = True
2738 else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")
2739
2740 - def LIKE(self,first,second):
2741 args = (self.expand(first), self.expand(second,'string'))
2742 if not first.type in ('string', 'text', 'json'):
2743 return '(%s LIKE %s)' % (
2744 self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
2745 else:
2746 return '(%s LIKE %s)' % args
2747
2748 - def ILIKE(self,first,second):
2749 args = (self.expand(first), self.expand(second,'string'))
2750 if not first.type in ('string', 'text', 'json'):
2751 return '(%s LIKE %s)' % (
2752 self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
2753 else:
2754 return '(%s ILIKE %s)' % args
2755
2756 - def REGEXP(self,first,second):
2757 return '(%s ~ %s)' % (self.expand(first),
2758 self.expand(second,'string'))
2759
2760 - def STARTSWITH(self,first,second):
2761 return '(%s ILIKE %s)' % (self.expand(first),
2762 self.expand(second+'%','string'))
2763
2764 - def ENDSWITH(self,first,second):
2765 return '(%s ILIKE %s)' % (self.expand(first),
2766 self.expand('%'+second,'string'))
2767
2768
2769
2770 - def ST_ASGEOJSON(self, first, second):
2771 """
2772 http://postgis.org/docs/ST_AsGeoJSON.html
2773 """
2774 return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
2775 self.expand(first), second['precision'], second['options'])
2776
2777 - def ST_ASTEXT(self, first):
2778 """
2779 http://postgis.org/docs/ST_AsText.html
2780 """
2781 return 'ST_AsText(%s)' %(self.expand(first))
2782
2783 - def ST_X(self, first):
2784 """
2785 http://postgis.org/docs/ST_X.html
2786 """
2787 return 'ST_X(%s)' %(self.expand(first))
2788
2789 - def ST_Y(self, first):
2790 """
2791 http://postgis.org/docs/ST_Y.html
2792 """
2793 return 'ST_Y(%s)' %(self.expand(first))
2794
2795 - def ST_CONTAINS(self, first, second):
2796 """
2797 http://postgis.org/docs/ST_Contains.html
2798 """
2799 return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2800
2801 - def ST_DISTANCE(self, first, second):
2802 """
2803 http://postgis.org/docs/ST_Distance.html
2804 """
2805 return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2806
2807 - def ST_EQUALS(self, first, second):
2808 """
2809 http://postgis.org/docs/ST_Equals.html
2810 """
2811 return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2812
2813 - def ST_INTERSECTS(self, first, second):
2814 """
2815 http://postgis.org/docs/ST_Intersects.html
2816 """
2817 return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2818
2819 - def ST_OVERLAPS(self, first, second):
2820 """
2821 http://postgis.org/docs/ST_Overlaps.html
2822 """
2823 return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2824
2825 - def ST_SIMPLIFY(self, first, second):
2826 """
2827 http://postgis.org/docs/ST_Simplify.html
2828 """
2829 return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))
2830
2831 - def ST_TOUCHES(self, first, second):
2832 """
2833 http://postgis.org/docs/ST_Touches.html
2834 """
2835 return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2836
2837 - def ST_WITHIN(self, first, second):
2838 """
2839 http://postgis.org/docs/ST_Within.html
2840 """
2841 return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2842
2843 - def represent(self, obj, fieldtype):
2844 field_is_type = fieldtype.startswith
2845 if field_is_type('geo'):
2846 srid = 4326
2847 geotype, parms = fieldtype[:-1].split('(')
2848 parms = parms.split(',')
2849 if len(parms) >= 2:
2850 schema, srid = parms[:2]
2851 if field_is_type('geometry'):
2852 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
2853 elif field_is_type('geography'):
2854 value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
2855
2856
2857 return value
2858 return BaseAdapter.represent(self, obj, fieldtype)
2859
class NewPostgreSQLAdapter(PostgreSQLAdapter):
    """PostgreSQL adapter variant that stores ``list:*`` field types in
    native PostgreSQL array columns instead of serialized TEXT."""
    drivers = ('psycopg2','pg8000')

    # same as PostgreSQLAdapter.types except list:* map to array columns
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BIGINT[]',
        'list:string': 'TEXT[]',
        'list:reference': 'BIGINT[]',
        'geometry': 'GEOMETRY',
        'geography': 'GEOGRAPHY',
        'big-id': 'BIGSERIAL PRIMARY KEY',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
    }
2889
2890 - def parse_list_integers(self, value, field_type):
2892
2893 - def parse_list_references(self, value, field_type):
2894 return [self.parse_reference(r, field_type[5:]) for r in value]
2895
2896 - def parse_list_strings(self, value, field_type):
2898
2899 - def represent(self, obj, fieldtype):
2900 field_is_type = fieldtype.startswith
2901 if field_is_type('list:'):
2902 if not obj:
2903 obj = []
2904 elif not isinstance(obj, (list, tuple)):
2905 obj = [obj]
2906 if field_is_type('list:string'):
2907 obj = map(str,obj)
2908 else:
2909 obj = map(int,obj)
2910 return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
2911 return BaseAdapter.represent(self, obj, fieldtype)
2912
2913
class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
    """PostgreSQL adapter for Jython via the zxJDBC bridge."""
    drivers = ('zxJDBC',)

    # postgres://user[:password]@host[:port]/dbname  (no sslmode here)
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
2918
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None ):
        """Parse the URI into a ``jdbc:postgresql://host:port/db`` triple
        (url, user, password) and build the connection factory."""
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError("Invalid URI string in DAL")
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '5432'
        # positional args for zxJDBC: (jdbc-url, user, password)
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connector(msg=msg,driver_args=driver_args):
            return self.driver.connect(*msg,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
2953
    def after_connection(self):
        # Same session setup as the parent adapter, but zxJDBC needs an
        # explicit BEGIN and the SET CLIENT_ENCODING form.
        self.connection.set_client_encoding('UTF8')
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.try_json()
2959
    drivers = ('cx_Oracle',)

    commit_on_alter_table = False
    # web2py field type -> Oracle column DDL fragment
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'json': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'bigint': 'NUMBER',
        'float': 'FLOAT',
        'double': 'BINARY_DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        'big-id': 'NUMBER PRIMARY KEY',
        'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
2991
2993 return '%s_sequence' % tablename
2994
2996 return '%s_trigger' % tablename
2997
2999 return 'LEFT OUTER JOIN'
3000
3002 return 'dbms_random.value'
3003
3004 - def NOT_NULL(self,default,field_type):
3005 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
3006
3007 - def _drop(self,table,mode):
3010
3011 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3012 if limitby:
3013 (lmin, lmax) = limitby
3014 if len(sql_w) > 1:
3015 sql_w_row = sql_w + ' AND w_row > %i' % lmin
3016 else:
3017 sql_w_row = 'WHERE w_row > %i' % lmin
3018 return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
3019 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3020
3026
3028 if fieldtype == 'blob':
3029 obj = base64.b64encode(str(obj))
3030 return ":CLOB('%s')" % obj
3031 elif fieldtype == 'date':
3032 if isinstance(obj, (datetime.date, datetime.datetime)):
3033 obj = obj.isoformat()[:10]
3034 else:
3035 obj = str(obj)
3036 return "to_date('%s','yyyy-mm-dd')" % obj
3037 elif fieldtype == 'datetime':
3038 if isinstance(obj, datetime.datetime):
3039 obj = obj.isoformat()[:19].replace('T',' ')
3040 elif isinstance(obj, datetime.date):
3041 obj = obj.isoformat()[:10]+' 00:00:00'
3042 else:
3043 obj = str(obj)
3044 return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
3045 return None
3046
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Build a cx_Oracle connection factory.

        The part of the URI after ``oracle://`` is passed verbatim to
        cx_Oracle.connect (TNS name or EZConnect string).
        """
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.find_or_make_work_folder()
        ruri = uri.split('://',1)[1]
        # allow the connection to be used from multiple threads by default
        if not 'threaded' in driver_args:
            driver_args['threaded']=True
        def connector(uri=ruri,driver_args=driver_args):
            return self.driver.connect(uri,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
3066
3068 self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3069 self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3070
3071 oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
3072
    def execute(self, command, args=None):
        """Execute *command*, rewriting inline :CLOB('...') pseudo-literals.

        Oracle cannot take large CLOB values as inline literals, so each one
        is replaced by a numbered bind position (:1, :2, ...) and its payload
        (with doubled quotes un-escaped) is appended to *args*.  A trailing
        semicolon is stripped — Oracle rejects it in single statements.
        """
        args = args or []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            # splice the bind number over the span of the CLOB literal;
            # [6:-2] strips the surrounding CLOB(' and ')
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        if command[-1:]==';':
            command = command[:-1]
        return self.log_execute(command, args)
3086
        tablename = table._tablename
        id_name = table._id.name
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        # Oracle has no auto-increment column: create the table, then a
        # sequence, then a BEFORE INSERT trigger that fills the id from the
        # sequence (and re-syncs the sequence when an explicit id is given).
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
        self.execute("""
CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
DECLARE
    curr_val NUMBER;
    diff_val NUMBER;
    PRAGMA autonomous_transaction;
BEGIN
    IF :NEW.%(id)s IS NOT NULL THEN
        EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
        diff_val := :NEW.%(id)s - curr_val - 1;
        IF diff_val != 0 THEN
          EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
          EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
          EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
        END IF;
    END IF;
    SELECT %(sequence_name)s.nextval INTO :NEW.%(id)s FROM DUAL;
END;
""" % dict(trigger_name=trigger_name, tablename=tablename,
           sequence_name=sequence_name,id=id_name))
3114
3119
3120
3121
3122
3123
3124
3125
3126
3127
3128
3130 if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
3131 return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
3132 for c in r]) for r in self.cursor]
3133 else:
3134 return self.cursor.fetchall()
3135
    drivers = ('pyodbc',)
    # separator between date and time in datetime literals
    T_SEP = 'T'

    # T-SQL quotes identifiers with square brackets.
    QUOTE_TEMPLATE = "[%s]"

    # web2py field type -> MS SQL Server column DDL fragment
    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
3170
3172 return '; ALTER TABLE %s ADD ' % tablename
3173
3176
3178 return "DATEPART(%s,%s)" % (what, self.expand(field))
3179
3181 return 'LEFT OUTER JOIN'
3182
3185
3188
3189 - def CAST(self, first, second):
3191
3193 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
3194
3196 return 'PRIMARY KEY CLUSTERED (%s)' % key
3197
3199 if what == 'LENGTH':
3200 what = 'LEN'
3201 return "%s(%s)" % (what, self.expand(first))
3202
3203
3204 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3205 if limitby:
3206 (lmin, lmax) = limitby
3207 sql_s += ' TOP %i' % lmax
3208 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3209
    # BIT column literals for boolean values
    TRUE = 1
    FALSE = 0

    # mssql://dsn  (whole string treated as an ODBC DSN)
    REGEX_DSN = re.compile('^(?P<dsn>.+)$')
    # mssql://user[:password]@host[:port]/dbname[?key=value&...]
    REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
    # key=value pairs in the URI query string
    REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
3216
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, srid=4326,
                 after_connection=None):
        """Build a pyodbc connection factory for MS SQL Server.

        Two URI shapes are accepted: a bare ODBC DSN (no '@' present) or a
        ``mssql://user:password@host:port/db?key=value`` URI whose query
        arguments are folded into the ODBC connection string.
        """
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        if do_connect: self.find_driver(adapter_args,uri)
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.srid = srid
        self.find_or_make_work_folder()

        ruri = uri.split('://',1)[1]
        if '@' not in ruri:
            # DSN form: pass the string through to pyodbc unchanged
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Parsing uri string(%s) has no result' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('NdGpatch error')
                raise e

            cnxn = dsn
        else:
            m = self.REGEX_URI.match(ruri)
            if not m:
                raise SyntaxError(
                    "Invalid URI string in DAL: %s" % self.uri)
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError('User required')
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError('Host name required')
            db = m.group('db')
            if not db:
                raise SyntaxError('Database name required')
            port = m.group('port') or '1433'



            # query-string arguments override/extend the default ODBC driver
            argsdict = { 'DRIVER':'{SQL Server}' }
            urlargs = m.group('urlargs') or ''
            for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connector(cnxn=cnxn,driver_args=driver_args):
            return self.driver.connect(cnxn,**driver_args)
        self.connector = connector
        if do_connect: self.reconnect()
3280
3282
3283 self.execute('SELECT SCOPE_IDENTITY();')
3284 return long(self.cursor.fetchone()[0])
3285
3286 - def rowslice(self,rows,minimum=0,maximum=None):
3287 if maximum is None:
3288 return rows[minimum:]
3289 return rows[minimum:maximum]
3290
3291 - def EPOCH(self, first):
3292 return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
3293
3296
3297
3298
3299
3300
3301 - def ST_ASTEXT(self, first):
3302 return '%s.STAsText()' %(self.expand(first))
3303
3306
3309
3312
3315
3318
3319
3320
3323
3326
3328 field_is_type = fieldtype.startswith
3329 if field_is_type('geometry'):
3330 srid = 0
3331 geotype, parms = fieldtype[:-1].split('(')
3332 if parms:
3333 srid = parms
3334 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3335 elif fieldtype == 'geography':
3336 srid = 4326
3337 geotype, parms = fieldtype[:-1].split('(')
3338 if parms:
3339 srid = parms
3340 return "geography::STGeomFromText('%s',%s)" %(obj, srid)
3341
3342
3343 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3344 return BaseAdapter.represent(self, obj, fieldtype)
3345
3348 """ experimental support for pagination in MSSQL"""
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        """Paginate with ROW_NUMBER() when an offset is requested.

        With no offset (lmin == 0) plain TOP is enough.  Otherwise the
        select list is aliased (f_0, f_1, ...) inside a subquery that adds a
        ROW_NUMBER() window column, and the outer query filters the window.
        """
        if limitby:
            (lmin, lmax) = limitby
            if lmin == 0:
                sql_s += ' TOP %i' % lmax
                return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
            # BETWEEN is inclusive on both ends, so shift the lower bound
            lmin += 1
            # NOTE(review): sql_o_inner is computed but never used below
            sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
            sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
            sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
            sql_f_inner = [f for f in sql_f.split(',')]
            sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
            sql_f_iproxy = ', '.join(sql_f_iproxy)
            sql_f_oproxy = ', '.join(sql_f_outer)
            return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
3365 - def rowslice(self,rows,minimum=0,maximum=None):
3367
    drivers = ('pyodbc',)

    # Unicode variant of the MSSQL type map: N-prefixed character types
    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NTEXT',
        'json': 'NTEXT',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'NVARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'NTEXT',
        'list:string': 'NTEXT',
        'list:reference': 'NTEXT',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
3398
3400 value = BaseAdapter.represent(self, obj, fieldtype)
3401 if fieldtype in ('string','text', 'json') and value[:1]=="'":
3402 value = 'N'+value
3403 return value
3404
3407
    drivers = ('pyodbc',)
    # datetime literals separate date and time with a space
    T_SEP = ' '

    # web2py field type -> Vertica column DDL fragment
    types = {
        'boolean': 'BOOLEAN',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BYTEA',
        'json': 'VARCHAR(%(length)s)',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'IDENTITY',
        'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BYTEA',
        'list:string': 'BYTEA',
        'list:reference': 'BYTEA',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
    }
3435
3436
3438 return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))
3439
3441 tablename = table._tablename
3442 return ['TRUNCATE %s %s;' % (tablename, mode or '')]
3443
3444 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3445 if limitby:
3446 (lmin, lmax) = limitby
3447 sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
3448 return 'SELECT %s %s FROM %s%s%s;' % \
3449 (sql_s, sql_f, sql_t, sql_w, sql_o)
3450
3452 self.execute('SELECT LAST_INSERT_ID();')
3453 return long(self.cursor.fetchone()[0])
3454
3457
    drivers = ('Sybase',)

    # web2py field type -> Sybase column DDL fragment
    types = {
        'boolean': 'BIT',
        'string': 'CHAR VARYING(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR VARYING(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'CHAR VARYING(%(length)s)',
        'integer': 'INT',
        'bigint': 'BIGINT',
        'float': 'FLOAT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        'geometry': 'geometry',
        'geography': 'geography',
        'big-id': 'BIGINT IDENTITY PRIMARY KEY',
        'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
    }
3489
3490
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, srid=4326,
             after_connection=None):
    """
    Sybase adapter constructor.

    Two URI flavours are accepted after 'sybase://':
      * a DSN string (no '@' present), passed through as-is, or
      * user:password@host[:port]/db, from which a pyodbc DSN is built.

    Fixes over the previous revision:
      * ``driver_args`` is only updated with user/password in the
        credentials branch; previously that ran unconditionally and the
        DSN branch raised NameError because ``user`` was never bound;
      * credentials are no longer passed through ``credential_decoder``
        twice (corrupts them for non-idempotent decoders);
      * REGEX_URI is matched against ``ruri`` (scheme stripped), not the
        full URI, so the 'sybase' scheme is no longer captured as user.
    """
    self.db = db
    self.dbengine = "sybase"
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.srid = srid
    self.find_or_make_work_folder()

    ruri = uri.split('://',1)[1]
    if '@' not in ruri:
        # DSN form: credentials, if any, are embedded in the DSN itself
        try:
            m = self.REGEX_DSN.match(ruri)
            if not m:
                raise SyntaxError(
                    'Parsing uri string(%s) has no result' % self.uri)
            dsn = m.group('dsn')
            if not dsn:
                raise SyntaxError('DSN required')
        except SyntaxError:
            e = sys.exc_info()[1]
            LOGGER.error('NdGpatch error')
            raise e
    else:
        m = self.REGEX_URI.match(ruri)
        if not m:
            raise SyntaxError(
                "Invalid URI string in DAL: %s" % self.uri)
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError('User required')
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError('Host name required')
        db = m.group('db')
        if not db:
            raise SyntaxError('Database name required')
        port = m.group('port') or '1433'

        dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)

        # user/password were already decoded above; do not decode again
        driver_args.update(user=user, password=password)

    def connector(dsn=dsn,driver_args=driver_args):
        return self.driver.connect(dsn,**driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
3548
3551 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3552
3553 commit_on_alter_table = False
3554 support_distributed_transaction = True
3555 types = {
3556 'boolean': 'CHAR(1)',
3557 'string': 'VARCHAR(%(length)s)',
3558 'text': 'BLOB SUB_TYPE 1',
3559 'json': 'BLOB SUB_TYPE 1',
3560 'password': 'VARCHAR(%(length)s)',
3561 'blob': 'BLOB SUB_TYPE 0',
3562 'upload': 'VARCHAR(%(length)s)',
3563 'integer': 'INTEGER',
3564 'bigint': 'BIGINT',
3565 'float': 'FLOAT',
3566 'double': 'DOUBLE PRECISION',
3567 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
3568 'date': 'DATE',
3569 'time': 'TIME',
3570 'datetime': 'TIMESTAMP',
3571 'id': 'INTEGER PRIMARY KEY',
3572 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3573 'list:integer': 'BLOB SUB_TYPE 1',
3574 'list:string': 'BLOB SUB_TYPE 1',
3575 'list:reference': 'BLOB SUB_TYPE 1',
3576 'big-id': 'BIGINT PRIMARY KEY',
3577 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3578 }
3579
3581 return 'genid_%s' % tablename
3582
3584 return 'trg_id_%s' % tablename
3585
3588
def EPOCH(self, first):
    """Render seconds-since-Unix-epoch via Firebird's DATEDIFF."""
    expanded = self.expand(first)
    return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % expanded
3591
def NOT_NULL(self, default, field_type):
    """Render a NOT NULL column constraint with its DEFAULT value first
    (Firebird expects DEFAULT before NOT NULL)."""
    rendered = self.represent(default, field_type)
    return 'DEFAULT %s NOT NULL' % rendered
3594
3596 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
3597
3600
3601 - def CONTAINS(self,first,second,case_sensitive=False):
3607
3608 - def _drop(self,table,mode):
3611
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Assemble a SELECT statement; Firebird paginates with FIRST/SKIP
    placed ahead of the select modifiers rather than a trailing LIMIT."""
    if limitby:
        offset, upper = limitby
        sql_s = ' FIRST %i SKIP %i %s' % (upper - offset, offset, sql_s)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3617
3619 return ['DELETE FROM %s;' % table._tablename,
3620 'SET GENERATOR %s TO 0;' % table._sequence_name]
3621
3622 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
3623
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    Firebird adapter constructor.

    Parses 'firebird://user:password@host[:port]/db[?set_encoding=...]'
    and builds a connector closure for the selected driver.

    Fix: ``user`` and ``password`` were previously passed through
    ``credential_decoder`` a second time when filling ``driver_args``,
    which corrupts credentials for non-idempotent decoders (e.g. URL
    unquoting). They are now decoded exactly once.
    """
    self.db = db
    self.dbengine = "firebird"
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.find_or_make_work_folder()
    ruri = uri.split('://',1)[1]
    m = self.REGEX_URI.match(ruri)
    if not m:
        raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
    user = credential_decoder(m.group('user'))
    if not user:
        raise SyntaxError('User required')
    password = credential_decoder(m.group('password'))
    if not password:
        password = ''
    host = m.group('host')
    if not host:
        raise SyntaxError('Host name required')
    port = int(m.group('port') or 3050)  # 3050 is Firebird's default port
    db = m.group('db')
    if not db:
        raise SyntaxError('Database name required')
    charset = m.group('charset') or 'UTF8'
    # credentials already decoded above; pass them through untouched
    driver_args.update(dsn='%s/%s:%s' % (host,port,db),
                       user=user,
                       password=password,
                       charset=charset)

    def connector(driver_args=driver_args):
        return self.driver.connect(**driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
3663
3672
3677
3680 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3681
3682 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
3683
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    Embedded-Firebird adapter constructor.

    Parses 'firebird_embedded://user:password@/path/to/db[?set_encoding=...]';
    the database is addressed by filesystem path, so host is left empty.

    Fix: ``user`` and ``password`` were previously passed through
    ``credential_decoder`` a second time when filling ``driver_args``,
    which corrupts credentials for non-idempotent decoders. They are now
    decoded exactly once.
    """
    self.db = db
    self.dbengine = "firebird"
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.find_or_make_work_folder()
    ruri = uri.split('://',1)[1]
    m = self.REGEX_URI.match(ruri)
    if not m:
        raise SyntaxError(
            "Invalid URI string in DAL: %s" % self.uri)
    user = credential_decoder(m.group('user'))
    if not user:
        raise SyntaxError('User required')
    password = credential_decoder(m.group('password'))
    if not password:
        password = ''
    pathdb = m.group('path')
    if not pathdb:
        raise SyntaxError('Path required')
    charset = m.group('charset')
    if not charset:
        charset = 'UTF8'
    host = ''  # embedded engine: no server host
    # credentials already decoded above; pass them through untouched
    driver_args.update(host=host,
                       database=pathdb,
                       user=user,
                       password=password,
                       charset=charset)

    def connector(driver_args=driver_args):
        return self.driver.connect(**driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
3724
3830
3835
3838
3850
3852 drivers = ('pyodbc',)
3853
3854 types = {
3855 'boolean': 'CHAR(1)',
3856 'string': 'VARCHAR(%(length)s)',
3857 'text': 'CLOB',
3858 'json': 'CLOB',
3859 'password': 'VARCHAR(%(length)s)',
3860 'blob': 'BLOB',
3861 'upload': 'VARCHAR(%(length)s)',
3862 'integer': 'INT',
3863 'bigint': 'BIGINT',
3864 'float': 'REAL',
3865 'double': 'DOUBLE',
3866 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3867 'date': 'DATE',
3868 'time': 'TIME',
3869 'datetime': 'TIMESTAMP',
3870 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
3871 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3872 'list:integer': 'CLOB',
3873 'list:string': 'CLOB',
3874 'list:reference': 'CLOB',
3875 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
3876 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3877 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3878 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3879 }
3880
3882 return 'LEFT OUTER JOIN'
3883
3886
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Assemble a SELECT statement. DB2 only gets FETCH FIRST n ROWS
    ONLY here; the lower bound is applied client-side (see rowslice)."""
    if limitby:
        _, upper = limitby
        sql_o = '%s FETCH FIRST %i ROWS ONLY' % (sql_o, upper)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3892
3894 if fieldtype == 'blob':
3895 obj = base64.b64encode(str(obj))
3896 return "BLOB('%s')" % obj
3897 elif fieldtype == 'datetime':
3898 if isinstance(obj, datetime.datetime):
3899 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
3900 elif isinstance(obj, datetime.date):
3901 obj = obj.isoformat()[:10]+'-00.00.00'
3902 return "'%s'" % obj
3903 return None
3904
3905 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3906 credential_decoder=IDENTITY, driver_args={},
3907 adapter_args={}, do_connect=True, after_connection=None):
3920 self.connector = connector
3921 if do_connect: self.reconnect()
3922
3924 if command[-1:]==';':
3925 command = command[:-1]
3926 return self.log_execute(command)
3927
3929 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
3930 return long(self.cursor.fetchone()[0])
3931
def rowslice(self, rows, minimum=0, maximum=None):
    """Trim fetched rows client-side to the requested window
    (complements select_limitby, which cannot express an offset)."""
    # slicing with maximum=None is equivalent to slicing to the end
    return rows[minimum:maximum]
3936
3939 drivers = ('pyodbc',)
3940
3941 types = {
3942 'boolean': 'CHAR(1)',
3943 'string': 'VARCHAR(%(length)s)',
3944 'text': 'CLOB',
3945 'json': 'CLOB',
3946 'password': 'VARCHAR(%(length)s)',
3947 'blob': 'BLOB',
3948 'upload': 'VARCHAR(%(length)s)',
3949 'integer': 'INT',
3950 'bigint': 'BIGINT',
3951 'float': 'REAL',
3952 'double': 'DOUBLE',
3953 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3954 'date': 'DATE',
3955 'time': 'TIME',
3956 'datetime': 'TIMESTAMP',
3957
3958
3959 'id': 'INT GENERATED ALWAYS AS IDENTITY',
3960 'reference': 'INT',
3961 'list:integer': 'CLOB',
3962 'list:string': 'CLOB',
3963 'list:reference': 'CLOB',
3964 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',
3965 'big-reference': 'BIGINT',
3966 'reference FK': ' REFERENCES %(foreign_key)s',
3967 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
3968 }
3969
3970 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3971 credential_decoder=IDENTITY, driver_args={},
3972 adapter_args={}, do_connect=True, after_connection=None):
3985 self.connector = connector
3986 if do_connect: self.reconnect()
3987
3989 return 'LEFT OUTER JOIN'
3990
3991
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Assemble a SELECT statement using Teradata's TOP n to cap the
    row count (the lower bound is not expressible here)."""
    if limitby:
        _, upper = limitby
        sql_s = '%s TOP %i' % (sql_s, upper)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3997
3999 tablename = table._tablename
4000 return ['DELETE FROM %s ALL;' % (tablename)]
4001
4002 INGRES_SEQNAME='ii***lineitemsequence'
4007 drivers = ('pyodbc',)
4008
4009 types = {
4010 'boolean': 'CHAR(1)',
4011 'string': 'VARCHAR(%(length)s)',
4012 'text': 'CLOB',
4013 'json': 'CLOB',
4014 'password': 'VARCHAR(%(length)s)',
4015 'blob': 'BLOB',
4016 'upload': 'VARCHAR(%(length)s)',
4017 'integer': 'INTEGER4',
4018 'bigint': 'BIGINT',
4019 'float': 'FLOAT',
4020 'double': 'FLOAT8',
4021 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4022 'date': 'ANSIDATE',
4023 'time': 'TIME WITHOUT TIME ZONE',
4024 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
4025 'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
4026 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4027 'list:integer': 'CLOB',
4028 'list:string': 'CLOB',
4029 'list:reference': 'CLOB',
4030 'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
4031 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4032 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4033 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4034 }
4035
4037 return 'LEFT OUTER JOIN'
4038
4041
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Assemble a SELECT statement; Ingres uses FIRST n in the select
    modifiers and a trailing OFFSET, each emitted only when non-zero."""
    if limitby:
        lower, upper = limitby
        window = upper - lower
        if window:
            sql_s = '%s FIRST %d ' % (sql_s, window)
        if lower:
            sql_o = '%s OFFSET %d' % (sql_o, lower)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4052
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    Ingres adapter constructor.

    After the scheme, the URI is either a full ODBC connection string
    (detected by the presence of '='), or a bare database name from
    which a local-server ODBC connection string is synthesised.
    """
    self.db = db
    self.dbengine = "ingres"
    self._driver = pyodbc
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.find_or_make_work_folder()
    connstr = uri.split(':', 1)[1]
    # strip leading whitespace and any number of leading slashes
    connstr = connstr.lstrip()
    while connstr.startswith('/'):
        connstr = connstr[1:]
    if '=' in connstr:
        # remainder is already a raw ODBC connection string
        ruri = connstr
    else:
        # bare database name: build a default local ODBC string
        database_name = connstr
        default_driver_name = 'Ingres'
        vnode = '(local)'
        # NOTE(review): servertype is assigned but never used below --
        # presumably kept for a future connection option; confirm.
        servertype = 'ingres'
        ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
    def connector(cnxn=ruri,driver_args=driver_args):
        return self.driver.connect(cnxn,**driver_args)

    self.connector = connector

    if do_connect: self.reconnect()
4088
4090
4091
4092
4093 if hasattr(table,'_primarykey'):
4094 modify_tbl_sql = 'modify %s to btree unique on %s' % \
4095 (table._tablename,
4096 ', '.join(["'%s'" % x for x in table.primarykey]))
4097 self.execute(modify_tbl_sql)
4098 else:
4099 tmp_seqname='%s_iisq' % table._tablename
4100 query=query.replace(INGRES_SEQNAME, tmp_seqname)
4101 self.execute('create sequence %s' % tmp_seqname)
4102 self.execute(query)
4103 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
4104
4105
4107 tmp_seqname='%s_iisq' % table
4108 self.execute('select current value for %s' % tmp_seqname)
4109 return long(self.cursor.fetchone()[0])
4110
4113
4114 drivers = ('pyodbc',)
4115
4116 types = {
4117 'boolean': 'CHAR(1)',
4118 'string': 'NVARCHAR(%(length)s)',
4119 'text': 'NCLOB',
4120 'json': 'NCLOB',
4121 'password': 'NVARCHAR(%(length)s)',
4122 'blob': 'BLOB',
4123 'upload': 'VARCHAR(%(length)s)',
4124 'integer': 'INTEGER4',
4125 'bigint': 'BIGINT',
4126 'float': 'FLOAT',
4127 'double': 'FLOAT8',
4128 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4129 'date': 'ANSIDATE',
4130 'time': 'TIME WITHOUT TIME ZONE',
4131 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
4132 'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
4133 'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4134 'list:integer': 'NCLOB',
4135 'list:string': 'NCLOB',
4136 'list:reference': 'NCLOB',
4137 'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
4138 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4139 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4140 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4141 }
4142
4144 drivers = ('sapdb',)
4145
4146 support_distributed_transaction = False
4147 types = {
4148 'boolean': 'CHAR(1)',
4149 'string': 'VARCHAR(%(length)s)',
4150 'text': 'LONG',
4151 'json': 'LONG',
4152 'password': 'VARCHAR(%(length)s)',
4153 'blob': 'LONG',
4154 'upload': 'VARCHAR(%(length)s)',
4155 'integer': 'INT',
4156 'bigint': 'BIGINT',
4157 'float': 'FLOAT',
4158 'double': 'DOUBLE PRECISION',
4159 'decimal': 'FIXED(%(precision)s,%(scale)s)',
4160 'date': 'DATE',
4161 'time': 'TIME',
4162 'datetime': 'TIMESTAMP',
4163 'id': 'INT PRIMARY KEY',
4164 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4165 'list:integer': 'LONG',
4166 'list:string': 'LONG',
4167 'list:reference': 'LONG',
4168 'big-id': 'BIGINT PRIMARY KEY',
4169 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4170 }
4171
4173 return '%s_id_Seq' % table
4174
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """
    Assemble a SELECT statement; emulate LIMIT/OFFSET with SAP DB's
    ROWNO pseudo-column.

    The limited form nests the plain select twice: the inner query
    bounds the row count with ``WHERE ROWNO=<lmax>`` and exposes ROWNO
    as ``w_row``; the outer query then skips the first ``lmin`` rows
    via ``w_row > lmin``.
    """
    if limitby:
        (lmin, lmax) = limitby
        if len(sql_w) > 1:
            # a WHERE clause already exists: extend it for the outer query
            sql_w_row = sql_w + ' AND w_row > %i' % lmin
        else:
            sql_w_row = 'WHERE w_row > %i' % lmin
        # NOTE(review): this branch starts with '%s %s FROM' while the
        # unlimited branch starts with 'SELECT %s %s FROM' -- the SELECT
        # keyword looks missing here; confirm against callers.
        return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4184
4186
4187 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
4188 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
4189 % (table._tablename, table._id.name, table._sequence_name))
4190 self.execute(query)
4191
4192 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
4193
4194
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    SAP DB adapter constructor.

    Parses 'sapdb://user:password@host/db' and builds a connector
    closure around the driver's Connection() factory.
    """
    self.db = db
    self.dbengine = "sapdb"
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.find_or_make_work_folder()
    ruri = uri.split('://',1)[1]
    m = self.REGEX_URI.match(ruri)
    if not m:
        raise SyntaxError("Invalid URI string in DAL")
    user = credential_decoder(m.group('user'))
    if not user:
        raise SyntaxError('User required')
    password = credential_decoder(m.group('password'))
    if not password:
        # empty password is allowed; normalise None to ''
        password = ''
    host = m.group('host')
    if not host:
        raise SyntaxError('Host name required')
    db = m.group('db')
    if not db:
        raise SyntaxError('Database name required')
    # bind current values as defaults so the pool can re-create connections
    def connector(user=user, password=password, database=db,
                  host=host, driver_args=driver_args):
        return self.driver.Connection(user, password, database,
                                      host, **driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
4229
4231 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
4232 return long(self.cursor.fetchone()[0])
4233
4235 drivers = ('cubriddb',)
4236
4237 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
4238
def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    Cubrid adapter constructor.

    Parses 'cubrid://user:password@host[:port]/db[?set_encoding=...]'.

    Fix: the user name was previously run through ``credential_decoder``
    a second time before being bound into the connector (corrupting it
    for non-idempotent decoders), and a dead ``passwd`` re-decoding was
    computed and discarded. Credentials are now decoded exactly once.
    """
    self.db = db
    self.dbengine = "cubrid"
    self.uri = uri
    if do_connect: self.find_driver(adapter_args,uri)
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self._after_connection = after_connection
    self.find_or_make_work_folder()
    ruri = uri.split('://',1)[1]
    m = self.REGEX_URI.match(ruri)
    if not m:
        raise SyntaxError(
            "Invalid URI string in DAL: %s" % self.uri)
    user = credential_decoder(m.group('user'))
    if not user:
        raise SyntaxError('User required')
    password = credential_decoder(m.group('password'))
    if not password:
        password = ''
    host = m.group('host')
    if not host:
        raise SyntaxError('Host name required')
    db = m.group('db')
    if not db:
        raise SyntaxError('Database name required')
    port = int(m.group('port') or '30000')
    # NOTE(review): charset is parsed but never handed to the driver;
    # kept for backward compatibility -- confirm whether the driver
    # supports an encoding argument.
    charset = m.group('charset') or 'utf8'
    def connector(host=host, port=port, db=db,
                  user=user, passwd=password, driver_args=driver_args):
        return self.driver.connect(host,port,db,user,passwd,**driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
4277
4279 self.execute('SET FOREIGN_KEY_CHECKS=1;')
4280 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4281
4286
4287 web2py_filesystem = False
4288
4290 return self.db._adapter.escape(obj)
4291
4293 if not db._adapter.dbengine in ('mysql', 'postgres', 'sqlite'):
4294 raise RuntimeError("only MySQL/Postgres/SQLite can store metadata .table files in database for now")
4295 self.db = db
4296 self.filename = filename
4297 self.mode = mode
4298 if not self.web2py_filesystem:
4299 if db._adapter.dbengine == 'mysql':
4300 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
4301 elif db._adapter.dbengine in ('postgres', 'sqlite'):
4302 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
4303 self.db.executesql(sql)
4304 DatabaseStoredFile.web2py_filesystem = True
4305 self.p=0
4306 self.data = ''
4307 if mode in ('r','rw','a'):
4308 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
4309 % filename
4310 rows = self.db.executesql(query)
4311 if rows:
4312 self.data = rows[0][0]
4313 elif exists(filename):
4314 datafile = open(filename, 'r')
4315 try:
4316 self.data = datafile.read()
4317 finally:
4318 datafile.close()
4319 elif mode in ('r','rw'):
4320 raise RuntimeError("File %s does not exist" % filename)
4321
def read(self, bytes):
    """Return up to *bytes* characters from the in-memory buffer,
    starting at the cursor, and advance the cursor past them."""
    start = self.p
    chunk = self.data[start:start + bytes]
    self.p = start + len(chunk)
    return chunk
4326
4328 i = self.data.find('\n',self.p)+1
4329 if i>0:
4330 data, self.p = self.data[self.p:i], i
4331 else:
4332 data, self.p = self.data[self.p:], len(self.data)
4333 return data
4334
4337
4339 if self.db is not None:
4340 self.db.executesql(
4341 "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
4342 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
4343 % (self.filename, self.data.replace("'","''"))
4344 self.db.executesql(query)
4345 self.db.commit()
4346 self.db = None
4347
4350
4351 @staticmethod
4353 if exists(filename):
4354 return True
4355 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
4356 try:
4357 if db.executesql(query):
4358 return True
4359 except Exception, e:
4360 if not (db._adapter.isOperationalError(e) or
4361 db._adapter.isProgrammingError(e)):
4362 raise
4363
4364 tb = traceback.format_exc()
4365 LOGGER.error("Could not retrieve %s\n%s" % (filename, tb))
4366 return False
4367
4370
4373
4374 - def file_open(self, filename, mode='rb', lock=True):
4376
4379
4381 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
4382 self.db.executesql(query)
4383 self.db.commit()
4384
4386 uploads_in_blob = True
4387
4388 REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
4389
def __init__(self, db, uri='google:sql://realm:domain/database',
             pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=IDENTITY, driver_args={},
             adapter_args={}, do_connect=True, after_connection=None):
    """
    Google Cloud SQL adapter (speaks the MySQL dialect via rdbms).

    Parses 'google:sql://instance/database'; ``adapter_args['createdb']``
    (default True) controls whether the database is created/selected in
    after_connection or supplied directly to the driver.
    """
    self.db = db
    self.dbengine = "mysql"
    self.uri = uri
    self.pool_size = pool_size
    self.db_codec = db_codec
    self._after_connection = after_connection
    if do_connect: self.find_driver(adapter_args, uri)
    # metadata folder is addressed under $HOME, derived from the
    # application path stored in THREAD_LOCAL
    self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
        os.sep+'applications'+os.sep,1)[1])
    ruri = uri.split("://")[1]
    m = self.REGEX_URI.match(ruri)
    if not m:
        raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
    instance = credential_decoder(m.group('instance'))
    self.dbstring = db = credential_decoder(m.group('db'))
    driver_args['instance'] = instance
    if not 'charset' in driver_args:
        driver_args['charset'] = 'utf8'
    self.createdb = createdb = adapter_args.get('createdb',True)
    if not createdb:
        # database is fixed up-front instead of USE'd after connecting
        driver_args['database'] = db
    def connector(driver_args=driver_args):
        return rdbms.connect(**driver_args)
    self.connector = connector
    if do_connect: self.reconnect()
4420
4422 if self.createdb:
4423
4424 self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
4425 self.execute('USE %s' % self.dbstring)
4426 self.execute("SET FOREIGN_KEY_CHECKS=1;")
4427 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4428
4429 - def execute(self, command, *a, **b):
4431
4433 self.adapter_args = adapter_args
4434 self.driver = "google"
4435
4437 can_select_for_update = False
4438
4439 @staticmethod
4441 if isinstance(obj, str):
4442 return obj.decode('utf8')
4443 elif not isinstance(obj, unicode):
4444 return unicode(obj)
4445 return obj
4446
4448 return table._id > 0
4449
4451 field_is_type = fieldtype.startswith
4452 if isinstance(obj, CALLABLETYPES):
4453 obj = obj()
4454 if isinstance(fieldtype, SQLCustomType):
4455 return fieldtype.encoder(obj)
4456 if isinstance(obj, (Expression, Field)):
4457 raise SyntaxError("non supported on GAE")
4458 if self.dbengine == 'google:datastore':
4459 if isinstance(fieldtype, gae.Property):
4460 return obj
4461 is_string = isinstance(fieldtype,str)
4462 is_list = is_string and field_is_type('list:')
4463 if is_list:
4464 if not obj:
4465 obj = []
4466 if not isinstance(obj, (list, tuple)):
4467 obj = [obj]
4468 if obj == '' and not \
4469 (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
4470 return None
4471 if not obj is None:
4472 if isinstance(obj, list) and not is_list:
4473 obj = [self.represent(o, fieldtype) for o in obj]
4474 elif fieldtype in ('integer','bigint','id'):
4475 obj = long(obj)
4476 elif fieldtype == 'double':
4477 obj = float(obj)
4478 elif is_string and field_is_type('reference'):
4479 if isinstance(obj, (Row, Reference)):
4480 obj = obj['id']
4481 obj = long(obj)
4482 elif fieldtype == 'boolean':
4483 if obj and not str(obj)[0].upper() in '0F':
4484 obj = True
4485 else:
4486 obj = False
4487 elif fieldtype == 'date':
4488 if not isinstance(obj, datetime.date):
4489 (y, m, d) = map(int,str(obj).strip().split('-'))
4490 obj = datetime.date(y, m, d)
4491 elif isinstance(obj,datetime.datetime):
4492 (y, m, d) = (obj.year, obj.month, obj.day)
4493 obj = datetime.date(y, m, d)
4494 elif fieldtype == 'time':
4495 if not isinstance(obj, datetime.time):
4496 time_items = map(int,str(obj).strip().split(':')[:3])
4497 if len(time_items) == 3:
4498 (h, mi, s) = time_items
4499 else:
4500 (h, mi, s) = time_items + [0]
4501 obj = datetime.time(h, mi, s)
4502 elif fieldtype == 'datetime':
4503 if not isinstance(obj, datetime.datetime):
4504 (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
4505 time_items = map(int,str(obj)[11:].strip().split(':')[:3])
4506 while len(time_items)<3:
4507 time_items.append(0)
4508 (h, mi, s) = time_items
4509 obj = datetime.datetime(y, m, d, h, mi, s)
4510 elif fieldtype == 'blob':
4511 pass
4512 elif fieldtype == 'json':
4513 if isinstance(obj, basestring):
4514 obj = self.to_unicode(obj)
4515 if have_serializers:
4516 obj = serializers.loads_json(obj)
4517 elif simplejson:
4518 obj = simplejson.loads(obj)
4519 else:
4520 raise RuntimeError("missing simplejson")
4521 elif is_string and field_is_type('list:string'):
4522 return map(self.to_unicode,obj)
4523 elif is_list:
4524 return map(int,obj)
4525 else:
4526 obj = self.to_unicode(obj)
4527 return obj
4528
4530 return 'insert %s in %s' % (fields, table)
4531
def _count(self,query,distinct=None):
    """Return a pseudo-SQL description string for a count operation
    (NoSQL back-ends have no real SQL to log)."""
    return 'count ' + repr(query)
4534
def _select(self,query,fields,attributes):
    """Return a pseudo-SQL description string for a select operation."""
    return 'select {0} where {1}'.format(repr(fields), repr(query))
4537
def _delete(self,tablename, query):
    """Return a pseudo-SQL description string for a delete operation."""
    return 'delete {0} where {1}'.format(repr(tablename), repr(query))
4540
def _update(self,tablename,query,fields):
    """Return a pseudo-SQL description string for an update operation."""
    parts = (repr(tablename), repr(fields), repr(query))
    return 'update %s (%s) where %s' % parts
4544
4546 """
4547 remember: no transactions on many NoSQL
4548 """
4549 pass
4550
4552 """
4553 remember: no transactions on many NoSQL
4554 """
4555 pass
4556
4558 """
4559 remember: no transactions on many NoSQL
4560 """
4561 pass
4562
4563
4564
# Placeholder overrides: these SQL-generation hooks cannot be expressed
# on NoSQL back-ends, so each fails fast instead of emitting bad queries.
def OR(self,first,second): raise SyntaxError("Not supported")
def AND(self,first,second): raise SyntaxError("Not supported")
def AS(self,first,second): raise SyntaxError("Not supported")
def ON(self,first,second): raise SyntaxError("Not supported")
def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
def ADD(self,first,second): raise SyntaxError("Not supported")
def SUB(self,first,second): raise SyntaxError("Not supported")
def MUL(self,first,second): raise SyntaxError("Not supported")
def DIV(self,first,second): raise SyntaxError("Not supported")
def LOWER(self,first): raise SyntaxError("Not supported")
def UPPER(self,first): raise SyntaxError("Not supported")
def LENGTH(self, first): raise SyntaxError("Not supported")
def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
def LEFT_JOIN(self): raise SyntaxError("Not supported")
def RANDOM(self): raise SyntaxError("Not supported")
def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
def ILIKE(self,first,second): raise SyntaxError("Not supported")
def drop(self,table,mode): raise SyntaxError("Not supported")
def alias(self,table,alias): raise SyntaxError("Not supported")
def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
def prepare(self,key): raise SyntaxError("Not supported")
def concat_add(self,table): raise SyntaxError("Not supported")
def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
def log_execute(self,*a,**b): raise SyntaxError("Not supported")
def execute(self,*a,**b): raise SyntaxError("Not supported")
def lastrowid(self,table): raise SyntaxError("Not supported")
def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
4600
4601
4602 -class GAEF(object):
def __init__(self,name,op,value,apply):
    """Hold one GAE filter as (field name, operator, value, applier)."""
    # the 'id' pseudo-field maps to the datastore's key
    self.name = '__key__' if name == 'id' else name
    self.op = op
    self.value = value
    self.apply = apply
4609 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
4610
4612 uploads_in_blob = True
4613 types = {}
4614
4616 - def file_open(self, filename, mode='rb', lock=True): pass
4618
4619 REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
4620
    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Adapter for the Google App Engine datastore.

        Installs the mapping from DAL field types to gae.Property
        factories and, when the URI carries a path component, switches
        to that datastore namespace.
        """
        self.types.update({
            'boolean': gae.BooleanProperty,
            # StringProperty rejects newlines unless multiline=True
            'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
            'text': gae.TextProperty,
            'json': gae.TextProperty,
            'password': gae.StringProperty,
            'blob': gae.BlobProperty,
            'upload': gae.StringProperty,
            'integer': gae.IntegerProperty,
            'bigint': gae.IntegerProperty,
            'float': gae.FloatProperty,
            'double': gae.FloatProperty,
            'decimal': GAEDecimalProperty,
            'date': gae.DateProperty,
            'time': gae.TimeProperty,
            'datetime': gae.DateTimeProperty,
            # 'id' is the entity key, handled separately in create_table
            'id': None,
            'reference': gae.IntegerProperty,
            'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
            'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
            'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
            })
        self.db = db
        self.uri = uri
        self.dbengine = 'google:datastore'
        self.folder = folder
        db['_lastsql'] = ''
        # the datastore has no connection pooling and no codec choice;
        # both are fixed regardless of the arguments passed in
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = 0
        # 'google:datastore://<namespace>' selects a datastore namespace
        match = self.REGEX_NAMESPACE.match(uri)
        if match:
            namespace_manager.set_namespace(match.group('namespace'))
4657
4658 - def parse_id(self, value, field_type):
4660
4661 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
4662 myfields = {}
4663 for field in table:
4664 if isinstance(polymodel,Table) and field.name in polymodel.fields():
4665 continue
4666 attr = {}
4667 if isinstance(field.custom_qualifier, dict):
4668
4669 attr = field.custom_qualifier
4670 field_type = field.type
4671 if isinstance(field_type, SQLCustomType):
4672 ftype = self.types[field_type.native or field_type.type](**attr)
4673 elif isinstance(field_type, gae.Property):
4674 ftype = field_type
4675 elif field_type.startswith('id'):
4676 continue
4677 elif field_type.startswith('decimal'):
4678 precision, scale = field_type[7:].strip('()').split(',')
4679 precision = int(precision)
4680 scale = int(scale)
4681 ftype = GAEDecimalProperty(precision, scale, **attr)
4682 elif field_type.startswith('reference'):
4683 if field.notnull:
4684 attr = dict(required=True)
4685 referenced = field_type[10:].strip()
4686 ftype = self.types[field_type[:9]](referenced, **attr)
4687 elif field_type.startswith('list:reference'):
4688 if field.notnull:
4689 attr['required'] = True
4690 referenced = field_type[15:].strip()
4691 ftype = self.types[field_type[:14]](**attr)
4692 elif field_type.startswith('list:'):
4693 ftype = self.types[field_type](**attr)
4694 elif not field_type in self.types\
4695 or not self.types[field_type]:
4696 raise SyntaxError('Field: unknown field type: %s' % field_type)
4697 else:
4698 ftype = self.types[field_type](**attr)
4699 myfields[field.name] = ftype
4700 if not polymodel:
4701 table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
4702 elif polymodel==True:
4703 table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
4704 elif isinstance(polymodel,Table):
4705 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
4706 else:
4707 raise SyntaxError("polymodel must be None, True, a table or a tablename")
4708 return None
4709
4710 - def expand(self,expression,field_type=None):
4711 if isinstance(expression,Field):
4712 if expression.type in ('text', 'blob', 'json'):
4713 raise SyntaxError('AppEngine does not index by: %s' % expression.type)
4714 return expression.name
4715 elif isinstance(expression, (Expression, Query)):
4716 if not expression.second is None:
4717 return expression.op(expression.first, expression.second)
4718 elif not expression.first is None:
4719 return expression.op(expression.first)
4720 else:
4721 return expression.op()
4722 elif field_type:
4723 return self.represent(expression,field_type)
4724 elif isinstance(expression,(list,tuple)):
4725 return ','.join([self.represent(item,field_type) for item in expression])
4726 else:
4727 return str(expression)
4728
4729
4730 - def AND(self,first,second):
4736
    # --- comparison operators -----------------------------------------
    # each returns a one-element list of GAEF filters: the native
    # datastore operator plus a python lambda fallback used when the
    # result set has already been materialised as a list.
    # comparisons on the 'id' field are rewritten against the entity Key.
    def EQ(self,first,second=None):
        if isinstance(second, Key):
            return [GAEF(first.name,'=',second,lambda a,b:a==b)]
        return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]

    def NE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
        else:
            if not second is None:
                second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]

    def LT(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<',second,lambda a,b:a<b)]

    def LE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]

    def GT(self,first,second=None):
        # 'id > 0' is the DAL idiom for "all records": keep it a plain
        # filter (select_raw later drops it) instead of building a Key
        if first.type != 'id' or second==0 or second == '0':
            return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>',second,lambda a,b:a>b)]

    def GE(self,first,second=None):
        if first.type != 'id':
            return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
4777
4780
4781 - def COMMA(self,first,second):
4783
4784 - def BELONGS(self,first,second=None):
4785 if not isinstance(second,(list, tuple)):
4786 raise SyntaxError("Not supported")
4787 if first.type != 'id':
4788 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
4789 else:
4790 second = [Key.from_path(first._tablename, int(i)) for i in second]
4791 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
4792
4793 - def CONTAINS(self,first,second,case_sensitive=False):
4798
4799 - def NOT(self,first):
4800 nops = { self.EQ: self.NE,
4801 self.NE: self.EQ,
4802 self.LT: self.GE,
4803 self.GT: self.LE,
4804 self.LE: self.GT,
4805 self.GE: self.LT}
4806 if not isinstance(first,Query):
4807 raise SyntaxError("Not suported")
4808 nop = nops.get(first.op,None)
4809 if not nop:
4810 raise SyntaxError("Not suported %s" % first.op.__name__)
4811 first.op = nop
4812 return self.expand(first)
4813
4816
4817 - def select_raw(self,query,fields=None,attributes=None):
4818 db = self.db
4819 fields = fields or []
4820 attributes = attributes or {}
4821 args_get = attributes.get
4822 new_fields = []
4823 for item in fields:
4824 if isinstance(item,SQLALL):
4825 new_fields += item._table
4826 else:
4827 new_fields.append(item)
4828 fields = new_fields
4829 if query:
4830 tablename = self.get_table(query)
4831 elif fields:
4832 tablename = fields[0].tablename
4833 query = db._adapter.id_query(fields[0].table)
4834 else:
4835 raise SyntaxError("Unable to determine a tablename")
4836
4837 if query:
4838 if use_common_filters(query):
4839 query = self.common_filter(query,[tablename])
4840
4841
4842 tableobj = db[tablename]._tableobj
4843 filters = self.expand(query)
4844
4845 projection = None
4846 if len(db[tablename].fields) == len(fields):
4847
4848 projection = None
4849 elif args_get('projection') == True:
4850 projection = []
4851 for f in fields:
4852 if f.type in ['text', 'blob', 'json']:
4853 raise SyntaxError(
4854 "text and blob field types not allowed in projection queries")
4855 else:
4856 projection.append(f.name)
4857 elif args_get('filterfields') == True:
4858 projection = []
4859 for f in fields:
4860 projection.append(f.name)
4861
4862
4863
4864 query_projection = [
4865 p for p in projection if \
4866 p != db[tablename]._id.name] if projection and \
4867 args_get('projection') == True\
4868 else None
4869
4870 cursor = None
4871 if isinstance(args_get('reusecursor'), str):
4872 cursor = args_get('reusecursor')
4873 items = gae.Query(tableobj, projection=query_projection,
4874 cursor=cursor)
4875
4876 for filter in filters:
4877 if args_get('projection') == True and \
4878 filter.name in query_projection and \
4879 filter.op in ['=', '<=', '>=']:
4880 raise SyntaxError(
4881 "projection fields cannot have equality filters")
4882 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
4883 continue
4884 elif filter.name=='__key__' and filter.op=='=':
4885 if filter.value==0:
4886 items = []
4887 elif isinstance(filter.value, Key):
4888
4889
4890
4891 item = tableobj.get(filter.value)
4892 items = (item and [item]) or []
4893 else:
4894
4895
4896
4897 item = tableobj.get_by_id(filter.value)
4898 items = (item and [item]) or []
4899 elif isinstance(items,list):
4900 items = [i for i in items if filter.apply(
4901 getattr(item,filter.name),filter.value)]
4902 else:
4903 if filter.name=='__key__' and filter.op != 'in':
4904 items.order('__key__')
4905 items = items.filter('%s %s' % (filter.name,filter.op),
4906 filter.value)
4907 if not isinstance(items,list):
4908 if args_get('left', None):
4909 raise SyntaxError('Set: no left join in appengine')
4910 if args_get('groupby', None):
4911 raise SyntaxError('Set: no groupby in appengine')
4912 orderby = args_get('orderby', False)
4913 if orderby:
4914
4915 if isinstance(orderby, (list, tuple)):
4916 orderby = xorify(orderby)
4917 if isinstance(orderby,Expression):
4918 orderby = self.expand(orderby)
4919 orders = orderby.split(', ')
4920 for order in orders:
4921 order={'-id':'-__key__','id':'__key__'}.get(order,order)
4922 items = items.order(order)
4923 if args_get('limitby', None):
4924 (lmin, lmax) = attributes['limitby']
4925 (limit, offset) = (lmax - lmin, lmin)
4926 rows = items.fetch(limit,offset=offset)
4927
4928
4929 if args_get('reusecursor'):
4930 db['_lastcursor'] = items.cursor()
4931 items = rows
4932 return (items, tablename, projection or db[tablename].fields)
4933
    def select(self,query,fields,attributes):
        """
        This is the GAE version of select. some notes to consider:
        - db['_lastsql'] is not set because there is not SQL statement string
          for a GAE query
        - 'nativeRef' is a magical fieldname used for self references on GAE
        - optional attribute 'projection' when set to True will trigger
          use of the GAE projection queries. note that there are rules for
          what is accepted imposed by GAE: each field must be indexed,
          projection queries cannot contain blob or text fields, and you
          cannot use == and also select that same field. see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
        - optional attribute 'filterfields' when set to True web2py will only
          parse the explicitly listed fields into the Rows object, even though
          all fields are returned in the query. This can be used to reduce
          memory usage in cases where true projection queries are not
          usable.
        - optional attribute 'reusecursor' allows use of cursor with queries
          that have the limitby attribute. Set the attribute to True for the
          first query, set it to the value of db['_lastcursor'] to continue
          a previous query. The user must save the cursor value between
          requests, and the filters must be identical. It is up to the user
          to follow google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
        """

        (items, tablename, fields) = self.select_raw(query,fields,attributes)
        # build value rows: for the id column (and the magic 'nativeRef'
        # column) the entity itself is placed in the row; the parser
        # extracts the key from it later
        rows = [[(t==self.db[tablename]._id.name and item) or \
                 (t=='nativeRef' and item) or getattr(item, t) \
                 for t in fields] for item in items]
        colnames = ['%s.%s' % (tablename, t) for t in fields]
        processor = attributes.get('processor',self.parse)
        return processor(rows,fields,colnames,False)
4966
4967 - def count(self,query,distinct=None,limit=None):
4968 if distinct:
4969 raise RuntimeError("COUNT DISTINCT not supported")
4970 (items, tablename, fields) = self.select_raw(query)
4971
4972 try:
4973 return len(items)
4974 except TypeError:
4975 return items.count(limit=limit)
4976
    def delete(self,tablename, query):
        """
        This function was changed on 2010-05-04 because according to
        http://code.google.com/p/googleappengine/issues/detail?id=3119
        GAE no longer supports deleting more than 1000 records.
        """
        # get a list of entities (or a gae.Query) matching the query
        (items, tablename, fields) = self.select_raw(query)
        if not isinstance(items,list):
            # items is a gae.Query: delete the keys in batches of 1000
            # until the query returns nothing more
            leftitems = items.fetch(1000, keys_only=True)
            counter = 0
            while len(leftitems):
                counter += len(leftitems)
                gae.delete(leftitems)
                leftitems = items.fetch(1000, keys_only=True)
        else:
            # single-record case: select_raw already materialised a list
            counter = len(items)
            gae.delete(items)
        return counter
4999
5000 - def update(self,tablename,query,update_fields):
5001
5002 (items, tablename, fields) = self.select_raw(query)
5003 counter = 0
5004 for item in items:
5005 for field, value in update_fields:
5006 setattr(item, field.name, self.represent(value,field.type))
5007 item.put()
5008 counter += 1
5009 LOGGER.info(str(counter))
5010 return counter
5011
    def insert(self,table,fields):
        """Store one new entity and return a Reference to its id."""
        # convert each value to its datastore representation
        dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
        tmp = table._tableobj(**dfields)
        tmp.put()
        # wrap the numeric id; also keep the raw datastore key on the
        # Reference for later key-based lookups
        rid = Reference(tmp.key().id())
        (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key())
        return rid
5020
5022 parsed_items = []
5023 for item in items:
5024 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
5025 parsed_items.append(table._tableobj(**dfields))
5026 gae.put(parsed_items)
5027 return True
5028
5030 return uuid.UUID(uuidv).int
5031
5033 return str(uuid.UUID(int=n))
5034
5036 drivers = ('couchdb',)
5037
5038 uploads_in_blob = True
5039 types = {
5040 'boolean': bool,
5041 'string': str,
5042 'text': str,
5043 'json': str,
5044 'password': str,
5045 'blob': str,
5046 'upload': str,
5047 'integer': long,
5048 'bigint': long,
5049 'float': float,
5050 'double': float,
5051 'date': datetime.date,
5052 'time': datetime.time,
5053 'datetime': datetime.datetime,
5054 'id': long,
5055 'reference': long,
5056 'list:string': list,
5057 'list:integer': list,
5058 'list:reference': list,
5059 }
5060
5062 - def file_open(self, filename, mode='rb', lock=True): pass
5064
    def expand(self,expression,field_type=None):
        # couchdb stores the primary key in the document's '_id' slot
        if isinstance(expression,Field):
            if expression.type=='id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self,expression,field_type)
5070
5071 - def AND(self,first,second):
5073
5074 - def OR(self,first,second):
5076
5077 - def EQ(self,first,second):
5081
5082 - def NE(self,first,second):
5086
5087 - def COMMA(self,first,second):
5089
5091 value = NoSQLAdapter.represent(self, obj, fieldtype)
5092 if fieldtype=='id':
5093 return repr(str(long(value)))
5094 elif fieldtype in ('date','time','datetime','boolean'):
5095 return serializers.json(value)
5096 return repr(not isinstance(value,unicode) and value \
5097 or value and value.encode('utf8'))
5098
    def __init__(self,db,uri='couchdb://127.0.0.1:5984',
                 pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Adapter for CouchDB via the couchdb-python client."""
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size

        # rewrite 'couchdb://host:port' as a plain HTTP server URL
        url='http://'+uri[10:]
        def connector(url=url,driver_args=driver_args):
            return self.driver.Server(url,**driver_args)
        # CouchDB has no cursor concept
        self.reconnect(connector,cursor=False)
5117
5118 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
5119 if migrate:
5120 try:
5121 self.connection.create(table._tablename)
5122 except:
5123 pass
5124
5125 - def insert(self,table,fields):
5132
    def _select(self,query,fields,attributes):
        """Build the CouchDB javascript map function and column names."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        # reject unknown select attributes up front
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        new_fields=[]
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        def uid(fd):
            # the row id lives in the document's '_id' slot
            return fd=='id' and '_id' or fd
        def get(row,fd):
            # NOTE(review): 'get' appears unused in this method — verify
            return fd=='id' and long(row['_id']) or row.get(fd,None)
        fields = new_fields
        tablename = self.get_table(query)
        fieldnames = [f.name for f in (fields or self.db[tablename])]
        colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
        fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
        # temporary view: emit the selected fields for each matching doc,
        # keyed (and therefore ordered) by _id
        fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
            dict(t=tablename,
                 query=self.expand(query),
                 order='%s._id' % tablename,
                 fields=fields)
        return fn, colnames
5159
    def select(self,query,fields,attributes):
        """Run the generated map function and parse rows via processor."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        fn, colnames = self._select(query,fields,attributes)
        # every colname is 'tablename.fieldname'; recover the table
        tablename = colnames[0].split('.')[0]
        ctable = self.connection[tablename]
        rows = [cols['value'] for cols in ctable.query(fn)]
        processor = attributes.get('processor',self.parse)
        return processor(rows,fields,colnames,False)
5169
5170 - def delete(self,tablename,query):
5171 if not isinstance(query,Query):
5172 raise SyntaxError("Not Supported")
5173 if query.first.type=='id' and query.op==self.EQ:
5174 id = query.second
5175 tablename = query.first.tablename
5176 assert(tablename == query.first.tablename)
5177 ctable = self.connection[tablename]
5178 try:
5179 del ctable[str(id)]
5180 return 1
5181 except couchdb.http.ResourceNotFound:
5182 return 0
5183 else:
5184 tablename = self.get_table(query)
5185 rows = self.select(query,[self.db[tablename]._id],{})
5186 ctable = self.connection[tablename]
5187 for row in rows:
5188 del ctable[str(row.id)]
5189 return len(rows)
5190
    def update(self,tablename,query,fields):
        """Update matching documents; returns the number updated."""
        if not isinstance(query,Query):
            raise SyntaxError("Not Supported")
        if query.first.type=='id' and query.op==self.EQ:
            # fast path: a single document addressed directly by its id
            id = query.second
            tablename = query.first.tablename
            ctable = self.connection[tablename]
            try:
                doc = ctable[str(id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
                ctable.save(doc)
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            # general case: select matching ids, rewrite each document
            tablename = self.get_table(query)
            rows = self.select(query,[self.db[tablename]._id],{})
            ctable = self.connection[tablename]
            table = self.db[tablename]
            for row in rows:
                doc = ctable[str(row.id)]
                for key,value in fields:
                    doc[key.name] = self.represent(value,table[key.name].type)
                ctable.save(doc)
            return len(rows)
5217
5218 - def count(self,query,distinct=None):
5219 if distinct:
5220 raise RuntimeError("COUNT DISTINCT not supported")
5221 if not isinstance(query,Query):
5222 raise SyntaxError("Not Supported")
5223 tablename = self.get_table(query)
5224 rows = self.select(query,[self.db[tablename]._id],{})
5225 return len(rows)
5226
5228 """
5229 validates that the given text is clean: only contains [0-9a-zA-Z_]
5230 """
5231 if not REGEX_ALPHANUMERIC.match(text):
5232 raise SyntaxError('invalid table or field name: %s' % text)
5233 return text
5234
5236 native_json = True
5237 drivers = ('pymongo',)
5238
5239 uploads_in_blob = True
5240
5241 types = {
5242 'boolean': bool,
5243 'string': str,
5244 'text': str,
5245 'json': str,
5246 'password': str,
5247 'blob': str,
5248 'upload': str,
5249 'integer': long,
5250 'bigint': long,
5251 'float': float,
5252 'double': float,
5253 'date': datetime.date,
5254 'time': datetime.time,
5255 'datetime': datetime.datetime,
5256 'id': long,
5257 'reference': long,
5258 'list:string': list,
5259 'list:integer': list,
5260 'list:reference': list,
5261 }
5262
5263 error_messages = {"javascript_needed": "This must yet be replaced" +
5264 " with javascript in order to work."}
5265
    def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
                 pool_size=0, folder=None, db_codec ='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        """Adapter for MongoDB via pymongo; the URI must name a database."""
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        import random
        from bson.objectid import ObjectId
        from bson.son import SON
        import pymongo.uri_parser

        m = pymongo.uri_parser.parse_uri(uri)

        # keep the bson/random helpers on the instance for later use
        self.SON = SON
        self.ObjectId = ObjectId
        self.random = random

        self.dbengine = 'mongodb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self._after_connection = after_connection
        self.pool_size = pool_size

        # minimum number of replicas that must acknowledge a write
        self.minimumreplication = adapter_args.get('minimumreplication',0)
        # safe=True makes writes synchronous (acknowledged) by default;
        # individual calls may still override it
        self.safe = adapter_args.get('safe',True)

        if isinstance(m,tuple):
            m = {"database" : m[1]}
        if m.get('database')==None:
            raise SyntaxError("Database is required!")

        def connector(uri=self.uri,m=m):
            # MongoClient arrived in pymongo 2.4; fall back to the older
            # Connection class when it is absent
            if hasattr(self.driver, "MongoClient"):
                Connection = self.driver.MongoClient
            else:
                Connection = self.driver.Connection
            return Connection(uri)[m.get('database')]

        # MongoDB has no cursor concept at the connection level
        self.reconnect(connector,cursor=False)
5314
5316 """ Convert input to a valid Mongodb ObjectId instance
5317
5318 self.object_id("<random>") -> ObjectId (not unique) instance """
5319 if not arg:
5320 arg = 0
5321 if isinstance(arg, basestring):
5322
5323 rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
5324 if arg.isdigit() and (not rawhex):
5325 arg = int(arg)
5326 elif arg == "<random>":
5327 arg = int("0x%sL" % \
5328 "".join([self.random.choice("0123456789abcdef") \
5329 for x in range(24)]), 0)
5330 elif arg.isalnum():
5331 if not arg.startswith("0x"):
5332 arg = "0x%s" % arg
5333 try:
5334 arg = int(arg, 0)
5335 except ValueError, e:
5336 raise ValueError(
5337 "invalid objectid argument string: %s" % e)
5338 else:
5339 raise ValueError("Invalid objectid argument string. " +
5340 "Requires an integer or base 16 value")
5341 elif isinstance(arg, self.ObjectId):
5342 return arg
5343
5344 if not isinstance(arg, (int, long)):
5345 raise TypeError("object_id argument must be of type " +
5346 "ObjectId or an objectid representable integer")
5347 if arg == 0:
5348 hexvalue = "".zfill(24)
5349 else:
5350 hexvalue = hex(arg)[2:].replace("L", "")
5351 return self.ObjectId(hexvalue)
5352
5354
5355 if isinstance(value, self.ObjectId):
5356 value = long(str(value), 16)
5357 return super(MongoDBAdapter,
5358 self).parse_reference(value, field_type)
5359
    def parse_id(self, value, field_type):
        # ObjectIds are surfaced to web2py as their hex value read as an
        # integer, then parsed like any other id
        if isinstance(value, self.ObjectId):
            value = long(str(value), 16)
        return super(MongoDBAdapter,
                     self).parse_id(value, field_type)
5365
5367
5368 if isinstance(obj, self.ObjectId):
5369 value = obj
5370 else:
5371 value = NoSQLAdapter.represent(self, obj, fieldtype)
5372
5373 if fieldtype =='date':
5374 if value == None:
5375 return value
5376
5377 t = datetime.time(0, 0, 0)
5378
5379
5380 return datetime.datetime.combine(value, t)
5381 elif fieldtype == 'time':
5382 if value == None:
5383 return value
5384
5385 d = datetime.date(2000, 1, 1)
5386
5387
5388 return datetime.datetime.combine(d, value)
5389 elif fieldtype == "blob":
5390 from bson import Binary
5391 if not isinstance(value, Binary):
5392 return Binary(value)
5393 return value
5394 elif (isinstance(fieldtype, basestring) and
5395 fieldtype.startswith('list:')):
5396 if fieldtype.startswith('list:reference'):
5397 newval = []
5398 for v in value:
5399 newval.append(self.object_id(v))
5400 return newval
5401 return value
5402 elif ((isinstance(fieldtype, basestring) and
5403 fieldtype.startswith("reference")) or
5404 (isinstance(fieldtype, Table)) or fieldtype=="id"):
5405 value = self.object_id(value)
5406 return value
5407
5408 - def create_table(self, table, migrate=True, fake_migrate=False,
5409 polymodel=None, isCapped=False):
5410 if isCapped:
5411 raise RuntimeError("Not implemented")
5412
5413 - def count(self, query, distinct=None, snapshot=True):
5414 if distinct:
5415 raise RuntimeError("COUNT DISTINCT not supported")
5416 if not isinstance(query,Query):
5417 raise SyntaxError("Not Supported")
5418 tablename = self.get_table(query)
5419 return long(self.select(query,[self.db[tablename]._id], {},
5420 count=True,snapshot=snapshot)['count'])
5421
5422
5423
5424
5425
    def expand(self, expression, field_type=None):
        """Convert a DAL expression into a pymongo query document."""
        if isinstance(expression, Query):
            # any query on 'id' or a reference field must compare against
            # ObjectIds, so coerce the right-hand side in place
            if isinstance(expression.first,Field) and \
                    ((expression.first.type == 'id') or \
                    ("reference" in expression.first.type)):
                if expression.first.type == 'id':
                    expression.first.name = '_id'
                # cast to a list of ObjectIds or to a single ObjectId
                if isinstance(expression.second, (tuple, list, set)):
                    expression.second = [self.object_id(item) for
                                         item in expression.second]
                else:
                    expression.second = self.object_id(expression.second)
                result = expression.op(expression.first, expression.second)
                # NOTE(review): this result is recomputed (identically,
                # after the in-place coercion) by the branch below
        if isinstance(expression, Field):
            if expression.type=='id':
                result = "_id"
            else:
                result = expression.name
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                result = expression.op(expression.first, expression.second)
            elif not expression.first is None:
                result = expression.op(expression.first)
            elif not isinstance(expression.op, str):
                result = expression.op()
            else:
                result = expression.op
        elif field_type:
            result = self.represent(expression,field_type)
        elif isinstance(expression,(list,tuple)):
            result = ','.join(self.represent(item,field_type) for
                              item in expression)
        else:
            # plain constant: passed through unchanged
            result = expression
        return result
5468
5469 - def drop(self, table, mode=''):
5472
5473 - def truncate(self, table, mode, safe=None):
5474 if safe == None:
5475 safe=self.safe
5476 ctable = self.connection[table._tablename]
5477 ctable.remove(None, safe=True)
5478
    def _select(self, query, fields, attributes):
        """Compile query/fields/attributes into pymongo find() inputs.

        Returns (tablename, query_dict, fields_dict, sort_list,
        limit, skip).
        """
        if 'for_update' in attributes:
            logging.warn('mongodb does not support for_update')
        # warn (but proceed) for select attributes mongo cannot honour
        for key in set(attributes.keys())-set(('limitby',
                                               'orderby','for_update')):
            if attributes[key]!=None:
                logging.warn('select attribute not implemented: %s' % key)

        new_fields=[]
        mongosort_list = []

        orderby = attributes.get('orderby', False)
        limitby = attributes.get('limitby', False)

        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            # '-field' means descending (-1), otherwise ascending (1)
            for f in self.expand(orderby).split(','):
                if f.startswith('-'):
                    mongosort_list.append((f[1:], -1))
                else:
                    mongosort_list.append((f, 1))
        if limitby:
            limitby_skip, limitby_limit = limitby[0], int(limitby[1])
        else:
            # pymongo treats 0 as "no skip / no limit"
            limitby_skip = limitby_limit = 0

        mongofields_dict = self.SON()
        mongoqry_dict = {}
        # expand table.ALL into the individual fields
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item._table
            else:
                new_fields.append(item)
        fields = new_fields
        if isinstance(query,Query):
            tablename = self.get_table(query)
        elif len(fields) != 0:
            tablename = fields[0].tablename
        else:
            raise SyntaxError("The table name could not be found in " +
                              "the query nor from the select statement.")
        mongoqry_dict = self.expand(query)
        fields = fields or self.db[tablename]
        # projection document: include each requested field
        for field in fields:
            mongofields_dict[field.name] = 1

        return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
            limitby_limit, limitby_skip
5531
    def select(self, query, fields, attributes, count=False,
               snapshot=False):
        """Run the compiled query; with count=True return {'count': n}."""
        tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
        limitby_limit, limitby_skip = self._select(query, fields, attributes)
        ctable = self.connection[tablename]

        if count:
            return {'count' : ctable.find(
                    mongoqry_dict, mongofields_dict,
                    skip=limitby_skip, limit=limitby_limit,
                    sort=mongosort_list, snapshot=snapshot).count()}
        else:
            # pymongo cursor over the matching documents
            mongo_list_dicts = ctable.find(mongoqry_dict,
                                mongofields_dict, skip=limitby_skip,
                                limit=limitby_limit, sort=mongosort_list,
                                snapshot=snapshot)
            rows = []
            # translate mongo's '_id' column name back to the DAL's 'id'
            colnames = []
            newnames = []
            for field in fields:
                colname = str(field)
                colnames.append(colname)
                tablename, fieldname = colname.split(".")
                if fieldname == "_id":
                    # NOTE(review): mutates the shared Field object's
                    # name in place — confirm this is intended
                    field.name = "id"
                newnames.append(".".join((tablename, field.name)))

            for record in mongo_list_dicts:
                row=[]
                for colname in colnames:
                    tablename, fieldname = colname.split(".")
                    # in the document itself the id is stored as '_id'
                    if fieldname == "id": fieldname = "_id"
                    if fieldname in record:
                        value = record[fieldname]
                    else:
                        value = None
                    row.append(value)
                rows.append(row)

            processor = attributes.get('processor', self.parse)
            result = processor(rows, fields, newnames, False)
            return result
5581
5582 - def _insert(self, table, fields):
5590
5591
5592
5593
5594 - def insert(self, table, fields, safe=None):
5601
5602
5603 - def _update(self, tablename, query, fields):
5604 if not isinstance(query, Query):
5605 raise SyntaxError("Not Supported")
5606 filter = None
5607 if query:
5608 filter = self.expand(query)
5609
5610 modify = {'$set': dict((k.name, self.represent(v, k.type)) for
5611 k, v in fields if (not k.name in ("_id", "id")))}
5612 return modify, filter
5613
    def update(self, tablename, query, fields, safe=None):
        """Update matching documents; returns the number affected."""
        if safe == None:
            safe = self.safe

        # return amount of adjusted rows in run
        if not isinstance(query, Query):
            raise RuntimeError("Not implemented")
        # pre-count as a fallback for when the driver gives no 'n'
        amount = self.count(query, False)
        modify, filter = self._update(tablename, query, fields)
        try:
            result = self.connection[tablename].update(filter,
                       modify, multi=True, safe=safe)
            if safe:
                try:
                    # in safe mode pymongo returns {'n': matched, ...}
                    return result["n"]
                except (KeyError, AttributeError, TypeError):
                    return amount
            else:
                return amount
        except Exception, e:
            # TODO Reverse update query to verify that the query succeeded
            raise RuntimeError("uncaught exception when updating rows: %s" % e)
5637
5638 - def _delete(self, tablename, query):
5639 if not isinstance(query, Query):
5640 raise RuntimeError("query type %s is not supported" % \
5641 type(query))
5642 return self.expand(query)
5643
5644 - def delete(self, tablename, query, safe=None):
5645 if safe is None:
5646 safe = self.safe
5647 amount = 0
5648 amount = self.count(query, False)
5649 filter = self._delete(tablename, query)
5650 self.connection[tablename].remove(filter, safe=safe)
5651 return amount
5652
5654 return [self.insert(table,item) for item in items]
5655
5656
5660
5661
5662 - def NOT(self, first):
5663 result = {}
5664 result["$not"] = self.expand(first)
5665 return result
5666
5667 - def AND(self,first,second):
5672
5673 - def OR(self,first,second):
5674
5675 result = {}
5676 f = self.expand(first)
5677 s = self.expand(second)
5678 result['$or'] = [f,s]
5679 return result
5680
5681 - def BELONGS(self, first, second):
5682 if isinstance(second, str):
5683 return {self.expand(first) : {"$in" : [ second[:-1]]} }
5684 elif second==[] or second==() or second==set():
5685 return {1:0}
5686 items = [self.expand(item, first.type) for item in second]
5687 return {self.expand(first) : {"$in" : items} }
5688
5689 - def EQ(self,first,second=None):
5690 result = {}
5691 result[self.expand(first)] = self.expand(second)
5692 return result
5693
5694 - def NE(self, first, second=None):
5695 result = {}
5696 result[self.expand(first)] = {'$ne': self.expand(second)}
5697 return result
5698
5699 - def LT(self,first,second=None):
5700 if second is None:
5701 raise RuntimeError("Cannot compare %s < None" % first)
5702 result = {}
5703 result[self.expand(first)] = {'$lt': self.expand(second)}
5704 return result
5705
5706 - def LE(self,first,second=None):
5707 if second is None:
5708 raise RuntimeError("Cannot compare %s <= None" % first)
5709 result = {}
5710 result[self.expand(first)] = {'$lte': self.expand(second)}
5711 return result
5712
5713 - def GT(self,first,second):
5714 result = {}
5715 result[self.expand(first)] = {'$gt': self.expand(second)}
5716 return result
5717
5718 - def GE(self,first,second=None):
5719 if second is None:
5720 raise RuntimeError("Cannot compare %s >= None" % first)
5721 result = {}
5722 result[self.expand(first)] = {'$gte': self.expand(second)}
5723 return result
5724
5725 - def ADD(self, first, second):
5729
5730 - def SUB(self, first, second):
5734
5735 - def MUL(self, first, second):
5739
5740 - def DIV(self, first, second):
5744
5745 - def MOD(self, first, second):
5749
    def AS(self, first, second):
        # Column aliasing cannot be expressed for this NoSQL backend; a
        # javascript-based implementation would be needed (message text
        # comes from the adapter's error_messages map).
        raise NotImplementedError(self.error_messages["javascript_needed"])
        # unreachable -- kept to document the SQL form this would emit
        return '%s AS %s' % (self.expand(first), second)
5753
5754
5755
5756
    def ON(self, first, second):
        # JOIN ... ON cannot be expressed for this NoSQL backend; a
        # higher-level wrapper would be needed to simulate it.
        raise NotImplementedError("This is not possible in NoSQL" +
                                  " but can be simulated with a wrapper.")
        # unreachable -- kept to document the SQL form this would emit
        return '%s ON %s' % (self.expand(first), self.expand(second))
5761
5762
5763
5764
5765 - def COMMA(self, first, second):
5767
5768 - def LIKE(self, first, second):
5772
5774
5775 return {self.expand(first): ('/^%s/' % \
5776 self.expand(second, 'string'))}
5777
5779
5780 return {self.expand(first): ('/%s^/' % \
5781 self.expand(second, 'string'))}
5782
5783 - def CONTAINS(self, first, second, case_sensitive=False):
5784
5785
5786
5787 val = second if isinstance(second,self.ObjectId) else \
5788 {'$regex':".*" + re.escape(self.expand(second, 'string')) + ".*"}
5789 return {self.expand(first) : val}
5790
5791 - def LIKE(self, first, second):
5796
5797
5799
5800 import re
5801 return {self.expand(first): {'$regex' : '^' +
5802 re.escape(self.expand(second,
5803 'string'))}}
5804
5805
5807
5808
5809
5810
5811 import re
5812 return {self.expand(first): {'$regex': \
5813 re.escape(self.expand(second, 'string')) + '$'}}
5814
5815
    def CONTAINS(self, first, second, case_sensitive=False):
        # Substring containment as a $regex anchored anywhere in the
        # value; ``case_sensitive`` is accepted but not honoured here.
        # NOTE(review): a CONTAINS with the same signature appears
        # earlier in this file -- if both live in the same class the
        # later definition wins at class-creation time; confirm class
        # boundaries before relying on either.
        return {self.expand(first) : {'$regex': \
            ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
5823
5826 drivers = ('imaplib',)
5827
5828 """ IMAP server adapter
5829
5830 This class is intended as an interface with
5831 email IMAP servers to perform simple queries in the
5832 web2py DAL query syntax, so email read, search and
5833 other related IMAP mail services (as those implemented
    by brands like Google(r) and Yahoo!(r))
5835 can be managed from web2py applications.
5836
5837 The code uses examples by Yuji Tomita on this post:
5838 http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
5839 and is based in docs for Python imaplib, python email
5840 and email IETF's (i.e. RFC2060 and RFC3501)
5841
5842 This adapter was tested with a small set of operations with Gmail(r). Other
5843 services requests could raise command syntax and response data issues.
5844
5845 It creates its table and field names "statically",
5846 meaning that the developer should leave the table and field
5847 definitions to the DAL instance by calling the adapter's
5848 .define_tables() method. The tables are defined with the
5849 IMAP server mailbox list information.
5850
5851 .define_tables() returns a dictionary mapping dal tablenames
5852 to the server mailbox names with the following structure:
5853
5854 {<tablename>: str <server mailbox name>}
5855
5856 Here is a list of supported fields:
5857
5858 Field Type Description
5859 ################################################################
5860 uid string
5861 answered boolean Flag
5862 created date
5863 content list:string A list of dict text or html parts
5864 to string
5865 cc string
5866 bcc string
5867 size integer the amount of octets of the message*
5868 deleted boolean Flag
5869 draft boolean Flag
5870 flagged boolean Flag
5871 sender string
5872 recent boolean Flag
5873 seen boolean Flag
5874 subject string
5875 mime string The mime header declaration
5876 email string The complete RFC822 message**
5877 attachments <type list> Each non text part as dict
5878 encoding string The main detected encoding
5879
5880 *At the application side it is measured as the length of the RFC822
5881 message string
5882
5883 WARNING: As row id's are mapped to email sequence numbers,
5884 make sure your imap client web2py app does not delete messages
5885 during select or update actions, to prevent
5886 updating or deleting different messages.
5887 Sequence numbers change whenever the mailbox is updated.
5888 To avoid this sequence numbers issues, it is recommended the use
5889 of uid fields in query references (although the update and delete
5890 in separate actions rule still applies).
5891
5892 # This is the code recommended to start imap support
5893 # at the app's model:
5894
5895 imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
5896 imapdb.define_tables()
5897
5898 Here is an (incomplete) list of possible imap commands:
5899
5900 # Count today's unseen messages
5901 # smaller than 6000 octets from the
5902 # inbox mailbox
5903
5904 q = imapdb.INBOX.seen == False
5905 q &= imapdb.INBOX.created == datetime.date.today()
5906 q &= imapdb.INBOX.size < 6000
5907 unread = imapdb(q).count()
5908
5909 # Fetch last query messages
5910 rows = imapdb(q).select()
5911
5912 # it is also possible to filter query select results with limitby and
5913 # sequences of mailbox fields
5914
5915 set.select(<fields sequence>, limitby=(<int>, <int>))
5916
5917 # Mark last query messages as seen
5918 messages = [row.uid for row in rows]
5919 seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
5920
5921 # Delete messages in the imap database that have mails from mr. Gumby
5922
5923 deleted = 0
    for mailbox in imapdb.tables:
5925 deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
5926
    # It is also possible to mark messages for deletion instead of erasing them
5928 # directly with set.update(deleted=True)
5929
5930
    # This object gives access
5932 # to the adapter auto mailbox
5933 # mapped names (which native
5934 # mailbox has what table name)
5935
5936 imapdb.mailboxes <dict> # tablename, server native name pairs
5937
5938 # To retrieve a table native mailbox name use:
5939 imapdb.<table>.mailbox
5940
5941 ### New features v2.4.1:
5942
5943 # Declare mailboxes statically with tablename, name pairs
5944 # This avoids the extra server names retrieval
5945
5946 imapdb.define_tables({"inbox": "INBOX"})
5947
5948 # Selects without content/attachments/email columns will only
5949 # fetch header and flags
5950
5951 imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
5952 """
5953
5954 types = {
5955 'string': str,
5956 'text': str,
5957 'date': datetime.date,
5958 'datetime': datetime.datetime,
5959 'id': long,
5960 'boolean': bool,
5961 'integer': int,
5962 'bigint': long,
5963 'blob': str,
5964 'list:string': str,
5965 }
5966
5967 dbengine = 'imap'
5968
5969 REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
5970
    def __init__(self,
                 db,
                 uri,
                 pool_size=0,
                 folder=None,
                 db_codec ='UTF-8',
                 credential_decoder=IDENTITY,
                 driver_args={},
                 adapter_args={},
                 do_connect=True,
                 after_connection=None):
        """Set up the IMAP adapter.

        Parses an ``imap://user:password@host:port`` URI, prepares the
        flag and search-field maps used to translate DAL queries into
        IMAP search criteria, and registers a connector callable for
        the connection pool.

        NOTE(review): the mutable default arguments (driver_args,
        adapter_args) are shared across calls -- pre-existing API, left
        unchanged here.
        """
        # basic adapter state, mirroring the other DAL adapters
        self.db = db
        self.uri = uri
        if do_connect: self.find_driver(adapter_args)
        self.pool_size=pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        self.credential_decoder = credential_decoder
        self.driver_args = driver_args
        self.adapter_args = adapter_args
        self.mailbox_size = None
        self.static_names = None
        self.charset = sys.getfilesystemencoding()
        # driver class (IMAP4 or IMAP4_SSL) is chosen in connector()
        self.imap4 = None
        uri = uri.split("://")[1]

        """ MESSAGE is an identifier for sequence number"""

        # DAL flag field names mapped to IMAP system flags
        self.flags = {'deleted': '\\Deleted', 'draft': '\\Draft',
                      'flagged': '\\Flagged', 'recent': '\\Recent',
                      'seen': '\\Seen', 'answered': '\\Answered'}
        # DAL field names mapped to IMAP SEARCH keys; None entries
        # cannot be used as search criteria
        self.search_fields = {
            'id': 'MESSAGE', 'created': 'DATE',
            'uid': 'UID', 'sender': 'FROM',
            'to': 'TO', 'cc': 'CC',
            'bcc': 'BCC', 'content': 'TEXT',
            'size': 'SIZE', 'deleted': '\\Deleted',
            'draft': '\\Draft', 'flagged': '\\Flagged',
            'recent': '\\Recent', 'seen': '\\Seen',
            'subject': 'SUBJECT', 'answered': '\\Answered',
            'mime': None, 'email': None,
            'attachments': None
            }

        db['_lastsql'] = ''

        m = self.REGEX_URI.match(uri)
        user = m.group('user')
        password = m.group('password')
        host = m.group('host')
        # NOTE(review): assumes the URI always carries an explicit
        # port; int(None) raises TypeError otherwise -- confirm callers
        port = int(m.group('port'))
        over_ssl = False
        if port==993:
            # the standard IMAPS port implies an SSL connection
            over_ssl = True

        driver_args.update(host=host,port=port, password=password, user=user)
        def connector(driver_args=driver_args):
            # pick the driver class on each (re)connect
            if over_ssl:
                self.imap4 = self.driver.IMAP4_SSL
            else:
                self.imap4 = self.driver.IMAP4
            connection = self.imap4(driver_args["host"], driver_args["port"])
            data = connection.login(driver_args["user"], driver_args["password"])

            # mailbox_names is filled lazily by get_mailboxes()
            connection.mailbox_names = None

            # imaplib has no cursor concept; provide a stub so the
            # connection pool can treat this like a DB connection
            connection.cursor = lambda : True

            return connection

        self.db.define_tables = self.define_tables
        self.connector = connector
        if do_connect: self.reconnect()
6054
6099
6101 last_message = None
6102
6103
6104 if not isinstance(self.connection.mailbox_names, dict):
6105 self.get_mailboxes()
6106 try:
6107 result = self.connection.select(self.connection.mailbox_names[tablename])
6108 last_message = int(result[1][0])
6109 except (IndexError, ValueError, TypeError, KeyError):
6110 e = sys.exc_info()[1]
6111 LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
6112 return last_message
6113
6115 if not isinstance(self.connection.mailbox_names, dict):
6116 self.get_mailboxes()
6117
6118
6119 last_message = self.get_last_message(tablename)
6120 result, data = self.connection.uid("search", None, "(ALL)")
6121 uid_list = data[0].strip().split()
6122 if len(uid_list) <= 0:
6123 return None
6124 else:
6125 return (uid_list[0], uid_list[-1])
6126
6128 if add is None:
6129 add = datetime.timedelta()
6130 """ Convert a date object to a string
6131 with d-Mon-Y style for IMAP or the inverse
6132 case
6133
6134 add <timedelta> adds to the date object
6135 """
6136 months = [None, "JAN","FEB","MAR","APR","MAY","JUN",
6137 "JUL", "AUG","SEP","OCT","NOV","DEC"]
6138 if isinstance(date, basestring):
6139
6140 try:
6141 dayname, datestring = date.split(",")
6142 date_list = datestring.strip().split()
6143 year = int(date_list[2])
6144 month = months.index(date_list[1].upper())
6145 day = int(date_list[0])
6146 hms = map(int, date_list[3].split(":"))
6147 return datetime.datetime(year, month, day,
6148 hms[0], hms[1], hms[2]) + add
6149 except (ValueError, AttributeError, IndexError), e:
6150 LOGGER.error("Could not parse date text: %s. %s" %
6151 (date, e))
6152 return None
6153 elif isinstance(date, (datetime.datetime, datetime.date)):
6154 return (date + add).strftime("%d-%b-%Y")
6155 else:
6156 return None
6157
6158 @staticmethod
6160 from email.header import decode_header
6161 text, encoding = decode_header(f)[0]
6162 if encoding:
6163 text = text.decode(encoding).encode('utf-8')
6164 return text
6165
6166 - def encode_text(self, text, charset, errors="replace"):
6167 """ convert text for mail to unicode"""
6168 if text is None:
6169 text = ""
6170 else:
6171 if isinstance(text, str):
6172 if charset is None:
6173 text = unicode(text, "utf-8", errors)
6174 else:
6175 text = unicode(text, charset, errors)
6176 else:
6177 raise Exception("Unsupported mail text type %s" % type(text))
6178 return text.encode("utf-8")
6179
6181 charset = message.get_content_charset()
6182 return charset
6183
6185 """ Query the mail database for mailbox names """
6186 if self.static_names:
6187
6188 self.connection.mailbox_names = self.static_names
6189 return self.static_names.keys()
6190
6191 mailboxes_list = self.connection.list()
6192 self.connection.mailbox_names = dict()
6193 mailboxes = list()
6194 x = 0
6195 for item in mailboxes_list[1]:
6196 x = x + 1
6197 item = item.strip()
6198 if not "NOSELECT" in item.upper():
6199 sub_items = item.split("\"")
6200 sub_items = [sub_item for sub_item in sub_items \
6201 if len(sub_item.strip()) > 0]
6202
6203 mailbox = sub_items[-1]
6204
6205
6206 mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
6207 mailboxes.append(mailbox_name)
6208 self.connection.mailbox_names[mailbox_name] = mailbox
6209
6210 return mailboxes
6211
6213 nofield = True
6214 tablename = None
6215 attr = query
6216 while nofield:
6217 if hasattr(attr, "first"):
6218 attr = attr.first
6219 if isinstance(attr, Field):
6220 return attr.tablename
6221 elif isinstance(attr, Query):
6222 pass
6223 else:
6224 return None
6225 else:
6226 return None
6227 return tablename
6228
6230 if self.search_fields.get(flag, None) in self.flags.values():
6231 return True
6232 else:
6233 return False
6234
6236 """
        Auto create common IMAP fields
6238
6239 This function creates fields definitions "statically"
6240 meaning that custom fields as in other adapters should
6241 not be supported and definitions handled on a service/mode
6242 basis (local syntax for Gmail(r), Ymail(r)
6243
6244 Returns a dictionary with tablename, server native mailbox name
6245 pairs.
6246 """
6247 if mailbox_names:
6248
6249 self.static_names = mailbox_names
6250 else:
6251 self.static_names = None
6252 if not isinstance(self.connection.mailbox_names, dict):
6253 self.get_mailboxes()
6254
6255 names = self.connection.mailbox_names.keys()
6256
6257 for name in names:
6258 self.db.define_table("%s" % name,
6259 Field("uid", "string", writable=False),
6260 Field("answered", "boolean"),
6261 Field("created", "datetime", writable=False),
6262 Field("content", list, writable=False),
6263 Field("to", "string", writable=False),
6264 Field("cc", "string", writable=False),
6265 Field("bcc", "string", writable=False),
6266 Field("size", "integer", writable=False),
6267 Field("deleted", "boolean"),
6268 Field("draft", "boolean"),
6269 Field("flagged", "boolean"),
6270 Field("sender", "string", writable=False),
6271 Field("recent", "boolean", writable=False),
6272 Field("seen", "boolean"),
6273 Field("subject", "string", writable=False),
6274 Field("mime", "string", writable=False),
6275 Field("email", "string", writable=False, readable=False),
6276 Field("attachments", list, writable=False, readable=False),
6277 Field("encoding", writable=False)
6278 )
6279
6280
6281
6282 self.db[name].mailbox = \
6283 self.connection.mailbox_names[name]
6284
6285
6286 self.db[name].to.represent = self.db[name].cc.represent = \
6287 self.db[name].bcc.represent = self.db[name].sender.represent = \
6288 self.db[name].subject.represent = self.header_represent
6289
6290
6291 self.db.mailboxes = self.connection.mailbox_names
6292 return self.db.mailboxes
6293
6298
6299 - def _select(self, query, fields, attributes):
6303
    def select(self, query, fields, attributes):
        """ Search and Fetch records and return web2py rows
        """
        # apply common-filter hooks before translating the query
        if use_common_filters(query):
            query = self.common_filter(query, [self.get_query_mailbox(query),])

        import email

        tablename = None
        fetch_results = list()

        if isinstance(query, Query):
            tablename = self.get_table(query)
            mailbox = self.connection.mailbox_names.get(tablename, None)
            if mailbox is None:
                raise ValueError("Mailbox name not found: %s" % mailbox)
            else:
                # select the mailbox read-only, then run a UID SEARCH
                # using the criteria produced by expanding the query
                result, selected = self.connection.select(mailbox, True)
                if result != "OK":
                    raise Exception("IMAP error: %s" % selected)
                self.mailbox_size = int(selected[0])
                search_query = "(%s)" % str(query).strip()
                search_result = self.connection.uid("search", None, search_query)

                if search_result[0] == "OK":
                    # limitby is applied to the uid list, newest first
                    limitby = attributes.get('limitby', None)
                    messages_set = search_result[1][0].split()
                    # descending order: most recent messages first
                    messages_set.reverse()
                    if limitby is not None:
                        # slice the requested window out of the uid list
                        messages_set = messages_set[int(limitby[0]):int(limitby[1])]

                    # fetch the complete RFC822 message only when a
                    # body-dependent column was requested; headers-only
                    # otherwise (much cheaper)
                    if any([(field.name in ["content", "size",
                                            "attachments", "email"]) for
                           field in fields]):
                        imap_fields = "(RFC822 FLAGS)"
                    else:
                        imap_fields = "(RFC822.HEADER FLAGS)"

                    if len(messages_set) > 0:
                        # fetch each message by uid, keeping the parsed
                        # email object, its flags and the raw text
                        for uid in messages_set:
                            # fetch the RFC822 message and its flags
                            typ, data = self.connection.uid("fetch", uid, imap_fields)
                            if typ == "OK":
                                fr = {"message": int(data[0][0].split()[0]),
                                      "uid": long(uid),
                                      "email": email.message_from_string(data[0][1]),
                                      "raw_message": data[0][1]}
                                fr["multipart"] = fr["email"].is_multipart()
                                # flags arrive in the second response item
                                fr["flags"] = self.driver.ParseFlags(data[1])
                                fetch_results.append(fr)
                            else:
                                raise Exception("IMAP error retrieving the body: %s" % data)
                else:
                    raise Exception("IMAP search error: %s" % search_result[1])
        elif isinstance(query, (Expression, basestring)):
            raise NotImplementedError()
        else:
            raise TypeError("Unexpected query type")

        imapqry_dict = {}
        imapfields_dict = {}

        # no explicit fields (or SQLALL) means every searchable column
        if len(fields) == 1 and isinstance(fields[0], SQLALL):
            allfields = True
        elif len(fields) == 0:
            allfields = True
        else:
            allfields = False
        if allfields:
            colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
        else:
            colnames = ["%s.%s" % (tablename, field.name) for field in fields]

        for k in colnames:
            imapfields_dict[k] = k

        imapqry_list = list()
        imapqry_array = list()
        for fr in fetch_results:
            attachments = []
            content = []
            size = 0
            n = int(fr["message"])
            item_dict = dict()
            message = fr["email"]
            uid = fr["uid"]
            charset = self.get_charset(message)
            flags = fr["flags"]
            raw_message = fr["raw_message"]
            # map each requested column ("<table>.<field>") onto the
            # corresponding header / flag / computed value; columns not
            # requested are simply not filled in

            if "%s.id" % tablename in colnames:
                item_dict["%s.id" % tablename] = n
            if "%s.created" % tablename in colnames:
                item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
            if "%s.uid" % tablename in colnames:
                item_dict["%s.uid" % tablename] = uid
            if "%s.sender" % tablename in colnames:
                # sender comes straight from the From header (the
                # header_represent hook decodes it for display)
                item_dict["%s.sender" % tablename] = message["From"]
            if "%s.to" % tablename in colnames:
                item_dict["%s.to" % tablename] = message["To"]
            if "%s.cc" % tablename in colnames:
                if "Cc" in message.keys():
                    item_dict["%s.cc" % tablename] = message["Cc"]
                else:
                    item_dict["%s.cc" % tablename] = ""
            if "%s.bcc" % tablename in colnames:
                if "Bcc" in message.keys():
                    item_dict["%s.bcc" % tablename] = message["Bcc"]
                else:
                    item_dict["%s.bcc" % tablename] = ""
            if "%s.deleted" % tablename in colnames:
                item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
            if "%s.draft" % tablename in colnames:
                item_dict["%s.draft" % tablename] = "\\Draft" in flags
            if "%s.flagged" % tablename in colnames:
                item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
            if "%s.recent" % tablename in colnames:
                item_dict["%s.recent" % tablename] = "\\Recent" in flags
            if "%s.seen" % tablename in colnames:
                item_dict["%s.seen" % tablename] = "\\Seen" in flags
            if "%s.subject" % tablename in colnames:
                item_dict["%s.subject" % tablename] = message["Subject"]
            if "%s.answered" % tablename in colnames:
                item_dict["%s.answered" % tablename] = "\\Answered" in flags
            if "%s.mime" % tablename in colnames:
                item_dict["%s.mime" % tablename] = message.get_content_type()
            if "%s.encoding" % tablename in colnames:
                item_dict["%s.encoding" % tablename] = charset

            # the complete raw RFC822 message is returned unparsed
            if "%s.email" % tablename in colnames:
                item_dict["%s.email" % tablename] = raw_message

            # walk the MIME tree collecting text parts into `content`
            # and everything else into `attachments`; the size column
            # accumulates the string length of every part
            for part in message.walk():
                maintype = part.get_content_maintype()
                if ("%s.attachments" % tablename in colnames) or \
                   ("%s.content" % tablename in colnames):
                    payload = part.get_payload(decode=True)
                    if payload:
                        filename = part.get_filename()
                        values = {"mime": part.get_content_type()}
                        if ((filename or not "text" in maintype) and
                            ("%s.attachments" % tablename in colnames)):
                            values.update({"payload": payload,
                                           "filename": filename,
                                           "encoding": part.get_content_charset(),
                                           "disposition": part["Content-Disposition"]})
                            attachments.append(values)
                        elif (("text" in maintype) and
                              ("%s.content" % tablename in colnames)):
                            values.update({"text": self.encode_text(payload,
                                               self.get_charset(part))})
                            content.append(values)

                if "%s.size" % tablename in colnames:
                    if part is not None:
                        size += len(str(part))
            item_dict["%s.content" % tablename] = content
            item_dict["%s.attachments" % tablename] = attachments
            item_dict["%s.size" % tablename] = size
            imapqry_list.append(item_dict)

        # flatten each message dict into a row array following the
        # requested column order, as expected by the row processor
        for item_dict in imapqry_list:
            imapqry_array_item = list()
            for fieldname in colnames:
                imapqry_array_item.append(item_dict[fieldname])
            imapqry_array.append(imapqry_array_item)

        # NOTE(review): self-assignment below is a no-op, kept as-is
        colnames = colnames
        processor = attributes.get('processor',self.parse)
        return processor(imapqry_array, fields, colnames)
6516
    def _insert(self, table, fields):
        """Build the argument tuple for an IMAP APPEND call.

        Returns ``(mailbox, flags, date_time, message)`` where
        *message* is the RFC822 string to append.  When no prebuilt
        "email" value is supplied, a message is assembled from the
        individual column values (sender, subject, to/cc/bcc, content,
        attachments).
        """
        def add_payload(message, obj):
            # attach one content/attachment dict as a MIME part
            payload = Message()
            payload.set_charset(obj.get("encoding", "utf-8"))
            mime = obj.get("mime", None)
            if mime:
                payload.set_type(mime)
            if "text" in obj:
                payload.set_payload(obj["text"])
            elif "payload" in obj:
                payload.set_payload(obj["payload"])
            if "filename" in obj and obj["filename"]:
                payload.add_header("Content-Disposition",
                    "attachment", filename=obj["filename"])
            message.attach(payload)

        mailbox = table.mailbox
        d = dict(((k.name, v) for k, v in fields))
        date_time = (d.get("created", datetime.datetime.now())).timetuple()
        if len(d) > 0:
            message = d.get("email", None)
            attachments = d.get("attachments", [])
            content = d.get("content", [])
            # translate boolean flag columns into IMAP system flags
            flags = " ".join(["\\%s" % flag.capitalize() for flag in
                             ("answered", "deleted", "draft", "flagged",
                              "recent", "seen") if d.get(flag, False)])
            if not message:
                # no raw message supplied: build one from the columns.
                # NOTE: this import also binds the Message name used by
                # the nested add_payload() above (function-local scope)
                from email.message import Message
                mime = d.get("mime", None)
                charset = d.get("encoding", None)
                message = Message()
                message["from"] = d.get("sender", "")
                message["subject"] = d.get("subject", "")
                if mime:
                    message.set_type(mime)
                if charset:
                    message.set_charset(charset)
                for item in ("to", "cc", "bcc"):
                    value = d.get(item, "")
                    if isinstance(value, basestring):
                        message[item] = value
                    else:
                        # sequences of addresses are joined with ";"
                        message[item] = ";".join([i for i in
                            value])
                # single-part messages take their payload directly;
                # multipart ones get content/attachments attached
                if (not message.is_multipart() and
                   (not message.get_content_type().startswith(
                        "multipart"))):
                    if isinstance(content, basestring):
                        message.set_payload(content)
                    elif len(content) > 0:
                        message.set_payload(content[0]["text"])
                else:
                    [add_payload(message, c) for c in content]
                    [add_payload(message, a) for a in attachments]
                message = message.as_string()
            return (mailbox, flags, date_time, message)
        else:
            raise NotImplementedError("IMAP empty insert is not implemented")
6575
6576 - def insert(self, table, fields):
6577 values = self._insert(table, fields)
6578 result, data = self.connection.append(*values)
6579 if result == "OK":
6580 uid = int(re.findall("\d+", str(data))[-1])
6581 return self.db(table.uid==uid).select(table.id).first().id
6582 else:
6583 raise Exception("IMAP message append failed: %s" % data)
6584
6585 - def _update(self, tablename, query, fields, commit=False):
6586
6587 commands = list()
6588 if use_common_filters(query):
6589 query = self.common_filter(query, [tablename,])
6590 mark = []
6591 unmark = []
6592 if query:
6593 for item in fields:
6594 field = item[0]
6595 name = field.name
6596 value = item[1]
6597 if self.is_flag(name):
6598 flag = self.search_fields[name]
6599 if (value is not None) and (flag != "\\Recent"):
6600 if value:
6601 mark.append(flag)
6602 else:
6603 unmark.append(flag)
6604 result, data = self.connection.select(
6605 self.connection.mailbox_names[tablename])
6606 string_query = "(%s)" % query
6607 result, data = self.connection.search(None, string_query)
6608 store_list = [item.strip() for item in data[0].split()
6609 if item.strip().isdigit()]
6610
6611 for number in store_list:
6612 result = None
6613 if len(mark) > 0:
6614 commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
6615 if len(unmark) > 0:
6616 commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
6617 return commands
6618
6619 - def update(self, tablename, query, fields):
6620 rowcount = 0
6621 commands = self._update(tablename, query, fields)
6622 for command in commands:
6623 result, data = self.connection.store(*command)
6624 if result == "OK":
6625 rowcount += 1
6626 else:
6627 raise Exception("IMAP storing error: %s" % data)
6628 return rowcount
6629
6630 - def _count(self, query, distinct=None):
6631 raise NotImplementedError()
6632
6633 - def count(self,query,distinct=None):
6645
6646 - def delete(self, tablename, query):
6647 counter = 0
6648 if query:
6649 if use_common_filters(query):
6650 query = self.common_filter(query, [tablename,])
6651 result, data = self.connection.select(self.connection.mailbox_names[tablename])
6652 string_query = "(%s)" % query
6653 result, data = self.connection.search(None, string_query)
6654 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
6655 for number in store_list:
6656 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
6657 if result == "OK":
6658 counter += 1
6659 else:
6660 raise Exception("IMAP store error: %s" % data)
6661 if counter > 0:
6662 result, data = self.connection.expunge()
6663 return counter
6664
6665 - def BELONGS(self, first, second):
6666 result = None
6667 name = self.search_fields[first.name]
6668 if name == "MESSAGE":
6669 values = [str(val) for val in second if str(val).isdigit()]
6670 result = "%s" % ",".join(values).strip()
6671
6672 elif name == "UID":
6673 values = [str(val) for val in second if str(val).isdigit()]
6674 result = "UID %s" % ",".join(values).strip()
6675
6676 else:
6677 raise Exception("Operation not supported")
6678
6679 return result
6680
6681 - def CONTAINS(self, first, second, case_sensitive=False):
6682
6683 result = None
6684 name = self.search_fields[first.name]
6685
6686 if name in ("FROM", "TO", "SUBJECT", "TEXT"):
6687 result = "%s \"%s\"" % (name, self.expand(second))
6688 else:
6689 if first.name in ("cc", "bcc"):
6690 result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
6691 elif first.name == "mime":
6692 result = "HEADER Content-Type \"%s\"" % self.expand(second)
6693 else:
6694 raise Exception("Operation not supported")
6695 return result
6696
6697 - def GT(self, first, second):
6698 result = None
6699 name = self.search_fields[first.name]
6700 if name == "MESSAGE":
6701 last_message = self.get_last_message(first.tablename)
6702 result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
6703 elif name == "UID":
6704
6705
6706
6707 try:
6708 pedestal, threshold = self.get_uid_bounds(first.tablename)
6709 except TypeError:
6710 e = sys.exc_info()[1]
6711 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6712 return ""
6713 try:
6714 lower_limit = int(self.expand(second)) + 1
6715 except (ValueError, TypeError):
6716 e = sys.exc_info()[1]
6717 raise Exception("Operation not supported (non integer UID)")
6718 result = "UID %s:%s" % (lower_limit, threshold)
6719 elif name == "DATE":
6720 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
6721 elif name == "SIZE":
6722 result = "LARGER %s" % self.expand(second)
6723 else:
6724 raise Exception("Operation not supported")
6725 return result
6726
6727 - def GE(self, first, second):
6728 result = None
6729 name = self.search_fields[first.name]
6730 if name == "MESSAGE":
6731 last_message = self.get_last_message(first.tablename)
6732 result = "%s:%s" % (self.expand(second), last_message)
6733 elif name == "UID":
6734
6735
6736
6737 try:
6738 pedestal, threshold = self.get_uid_bounds(first.tablename)
6739 except TypeError:
6740 e = sys.exc_info()[1]
6741 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6742 return ""
6743 lower_limit = self.expand(second)
6744 result = "UID %s:%s" % (lower_limit, threshold)
6745 elif name == "DATE":
6746 result = "SINCE %s" % self.convert_date(second)
6747 else:
6748 raise Exception("Operation not supported")
6749 return result
6750
6751 - def LT(self, first, second):
6752 result = None
6753 name = self.search_fields[first.name]
6754 if name == "MESSAGE":
6755 result = "%s:%s" % (1, int(self.expand(second)) - 1)
6756 elif name == "UID":
6757 try:
6758 pedestal, threshold = self.get_uid_bounds(first.tablename)
6759 except TypeError:
6760 e = sys.exc_info()[1]
6761 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6762 return ""
6763 try:
6764 upper_limit = int(self.expand(second)) - 1
6765 except (ValueError, TypeError):
6766 e = sys.exc_info()[1]
6767 raise Exception("Operation not supported (non integer UID)")
6768 result = "UID %s:%s" % (pedestal, upper_limit)
6769 elif name == "DATE":
6770 result = "BEFORE %s" % self.convert_date(second)
6771 elif name == "SIZE":
6772 result = "SMALLER %s" % self.expand(second)
6773 else:
6774 raise Exception("Operation not supported")
6775 return result
6776
6777 - def LE(self, first, second):
6778 result = None
6779 name = self.search_fields[first.name]
6780 if name == "MESSAGE":
6781 result = "%s:%s" % (1, self.expand(second))
6782 elif name == "UID":
6783 try:
6784 pedestal, threshold = self.get_uid_bounds(first.tablename)
6785 except TypeError:
6786 e = sys.exc_info()[1]
6787 LOGGER.debug("Error requesting uid bounds: %s", str(e))
6788 return ""
6789 upper_limit = int(self.expand(second))
6790 result = "UID %s:%s" % (pedestal, upper_limit)
6791 elif name == "DATE":
6792 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
6793 else:
6794 raise Exception("Operation not supported")
6795 return result
6796
6797 - def NE(self, first, second=None):
6798 if (second is None) and isinstance(first, Field):
6799
6800 if first.type == "id":
6801 return self.GE(first, 1)
6802 result = self.NOT(self.EQ(first, second))
6803 result = result.replace("NOT NOT", "").strip()
6804 return result
6805
6806 - def EQ(self,first,second):
6807 name = self.search_fields[first.name]
6808 result = None
6809 if name is not None:
6810 if name == "MESSAGE":
6811
6812 result = "%s" % self.expand(second)
6813 elif name == "UID":
6814 result = "UID %s" % self.expand(second)
6815 elif name == "DATE":
6816 result = "ON %s" % self.convert_date(second)
6817
6818 elif name in self.flags.values():
6819 if second:
6820 result = "%s" % (name.upper()[1:])
6821 else:
6822 result = "NOT %s" % (name.upper()[1:])
6823 else:
6824 raise Exception("Operation not supported")
6825 else:
6826 raise Exception("Operation not supported")
6827 return result
6828
6829 - def AND(self, first, second):
6832
6833 - def OR(self, first, second):
6836
6837 - def NOT(self, first):
6838 result = "NOT %s" % self.expand(first)
6839 return result
6840
6841
6842
6843
6844
# Registry mapping the URI scheme prefix (the part before '://' in a DAL
# connection string, e.g. 'sqlite' in 'sqlite://storage.sqlite') to the
# adapter class implementing it.  DAL.__init__ parses the scheme out of
# the uri and instantiates the matching adapter from this table.
ADAPTERS = {
    'sqlite': SQLiteAdapter,
    'spatialite': SpatiaLiteAdapter,
    'sqlite:memory': SQLiteAdapter,
    'spatialite:memory': SpatiaLiteAdapter,
    'mysql': MySQLAdapter,
    'postgres': PostgreSQLAdapter,
    'postgres:psycopg2': PostgreSQLAdapter,
    'postgres:pg8000': PostgreSQLAdapter,
    'postgres2:psycopg2': NewPostgreSQLAdapter,
    'postgres2:pg8000': NewPostgreSQLAdapter,
    'oracle': OracleAdapter,
    'mssql': MSSQLAdapter,
    'mssql2': MSSQL2Adapter,
    'mssql3': MSSQL3Adapter,
    'vertica': VerticaAdapter,
    'sybase': SybaseAdapter,
    'db2': DB2Adapter,
    'teradata': TeradataAdapter,
    'informix': InformixAdapter,
    'informix-se': InformixSEAdapter,
    'firebird': FireBirdAdapter,
    'firebird_embedded': FireBirdAdapter,
    'ingres': IngresAdapter,
    'ingresu': IngresUnicodeAdapter,
    'sapdb': SAPDBAdapter,
    'cubrid': CubridAdapter,
    'jdbc:sqlite': JDBCSQLiteAdapter,
    'jdbc:sqlite:memory': JDBCSQLiteAdapter,
    'jdbc:postgres': JDBCPostgreSQLAdapter,
    'gae': GoogleDatastoreAdapter,
    'google:datastore': GoogleDatastoreAdapter,
    'google:sql': GoogleSQLAdapter,
    'couchdb': CouchDBAdapter,
    'mongodb': MongoDBAdapter,
    'imap': IMAPAdapter
}
6884 """
6885 Field type validation, using web2py's validators mechanism.
6886
6887 makes sure the content of a field is in line with the declared
6888 fieldtype
6889 """
6890 db = field.db
6891 try:
6892 from gluon import validators
6893 except ImportError:
6894 return []
6895 field_type, field_length = field.type, field.length
6896 if isinstance(field_type, SQLCustomType):
6897 if hasattr(field_type, 'validator'):
6898 return field_type.validator
6899 else:
6900 field_type = field_type.type
6901 elif not isinstance(field_type,str):
6902 return []
6903 requires=[]
6904 def ff(r,id):
6905 row=r(id)
6906 if not row:
6907 return id
6908 elif hasattr(r, '_format') and isinstance(r._format,str):
6909 return r._format % row
6910 elif hasattr(r, '_format') and callable(r._format):
6911 return r._format(row)
6912 else:
6913 return id
6914 if field_type in (('string', 'text', 'password')):
6915 requires.append(validators.IS_LENGTH(field_length))
6916 elif field_type == 'json':
6917 requires.append(validators.IS_EMPTY_OR(validators.IS_JSON(native_json=field.db._adapter.native_json)))
6918 elif field_type == 'double' or field_type == 'float':
6919 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
6920 elif field_type in ('integer','bigint'):
6921 requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
6922 elif field_type.startswith('decimal'):
6923 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
6924 elif field_type == 'date':
6925 requires.append(validators.IS_DATE())
6926 elif field_type == 'time':
6927 requires.append(validators.IS_TIME())
6928 elif field_type == 'datetime':
6929 requires.append(validators.IS_DATETIME())
6930 elif db and field_type.startswith('reference') and \
6931 field_type.find('.') < 0 and \
6932 field_type[10:] in db.tables:
6933 referenced = db[field_type[10:]]
6934 def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
6935 field.represent = field.represent or repr_ref
6936 if hasattr(referenced, '_format') and referenced._format:
6937 requires = validators.IS_IN_DB(db,referenced._id,
6938 referenced._format)
6939 if field.unique:
6940 requires._and = validators.IS_NOT_IN_DB(db,field)
6941 if field.tablename == field_type[10:]:
6942 return validators.IS_EMPTY_OR(requires)
6943 return requires
6944 elif db and field_type.startswith('list:reference') and \
6945 field_type.find('.') < 0 and \
6946 field_type[15:] in db.tables:
6947 referenced = db[field_type[15:]]
6948 def list_ref_repr(ids, row=None, r=referenced, f=ff):
6949 if not ids:
6950 return None
6951 refs = None
6952 db, id = r._db, r._id
6953 if isinstance(db._adapter, GoogleDatastoreAdapter):
6954 def count(values): return db(id.belongs(values)).select(id)
6955 rx = range(0, len(ids), 30)
6956 refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
6957 else:
6958 refs = db(id.belongs(ids)).select(id)
6959 return (refs and ', '.join(f(r,x.id) for x in refs) or '')
6960 field.represent = field.represent or list_ref_repr
6961 if hasattr(referenced, '_format') and referenced._format:
6962 requires = validators.IS_IN_DB(db,referenced._id,
6963 referenced._format,multiple=True)
6964 else:
6965 requires = validators.IS_IN_DB(db,referenced._id,
6966 multiple=True)
6967 if field.unique:
6968 requires._and = validators.IS_NOT_IN_DB(db,field)
6969 if not field.notnull:
6970 requires = validators.IS_EMPTY_OR(requires)
6971 return requires
6972 elif field_type.startswith('list:'):
6973 def repr_list(values,row=None): return', '.join(str(v) for v in (values or []))
6974 field.represent = field.represent or repr_list
6975 if field.unique:
6976 requires.insert(0,validators.IS_NOT_IN_DB(db,field))
6977 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
6978 if field.notnull and not field_type[:2] in sff:
6979 requires.insert(0, validators.IS_NOT_EMPTY())
6980 elif not field.notnull and field_type[:2] in sff and requires:
6981 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
6982 return requires
6983
6986 return str(item).replace('|', '||')
6987
6990
6992 if not hasattr(value,'split') and hasattr(value,'read'):
6993 value = value.read()
6994 return [long(x) for x in value.split('|') if x.strip()]
6995
6999
7000
class Row(object):

    """
    a dictionary that lets you do d['a'] as well as d.a
    this is only used to store a Row
    """

    # all key/values live directly in the instance __dict__, so attribute
    # access and item access share the same storage
    __init__ = lambda self,*args,**kwargs: self.__dict__.update(*args,**kwargs)
7009
7011 key=str(k)
7012 _extra = self.__dict__.get('_extra', None)
7013 if _extra is not None:
7014 v = _extra.get(key, DEFAULT)
7015 if v != DEFAULT:
7016 return v
7017 m = REGEX_TABLE_DOT_FIELD.match(key)
7018 if m:
7019 try:
7020 return ogetattr(self, m.group(1))[m.group(2)]
7021 except (KeyError,AttributeError,TypeError):
7022 key = m.group(2)
7023 try:
7024 return ogetattr(self, key)
7025 except (KeyError,AttributeError,TypeError), ae:
7026 try:
7027 self[key] = ogetattr(self,'__get_lazy_reference__')(key)
7028 return self[key]
7029 except:
7030 raise ae
7031
    # item assignment goes through setattr so row['key'] and row.key stay
    # in sync (keys are coerced to str)
    __setitem__ = lambda self, key, value: setattr(self, str(key), value)

    # deleting an item deletes the underlying attribute
    __delitem__ = object.__delattr__

    # shallow copy: a new Row built from this one's key/values
    __copy__ = lambda self: Row(self)

    # row('a') behaves like row['a']
    __call__ = __getitem__
7040
7041 - def get(self, key, default=None):
7042 try:
7043 return self.__getitem__(key)
7044 except(KeyError, AttributeError, TypeError):
7045 return self.__dict__.get(key,default)
7046
    # membership tests look at the instance __dict__ (py2 has_key alias kept)
    has_key = __contains__ = lambda self, key: key in self.__dict__

    # a Row is truthy when it holds at least one key/value
    __nonzero__ = lambda self: len(self.__dict__)>0

    # dict-protocol delegation straight to the instance __dict__
    update = lambda self, *args, **kwargs: self.__dict__.update(*args, **kwargs)

    keys = lambda self: self.__dict__.keys()

    items = lambda self: self.__dict__.items()

    values = lambda self: self.__dict__.values()

    __iter__ = lambda self: self.__dict__.__iter__()

    iteritems = lambda self: self.__dict__.iteritems()

    __str__ = __repr__ = lambda self: '<Row %s>' % self.as_dict()

    # int()/long() of a Row yield its 'id' value, bypassing __getattr__
    __int__ = lambda self: object.__getattribute__(self,'id')

    __long__ = lambda self: long(object.__getattribute__(self,'id'))

    # attribute access falls back to item access (row.a == row['a'])
    __getattr__ = __getitem__
7070
7071
7072
7073
7074
7075
7076
7077
7078
7079
7081 try:
7082 return self.as_dict() == other.as_dict()
7083 except AttributeError:
7084 return False
7085
7087 return not (self == other)
7088
7090 return Row(dict(self))
7091
    def as_dict(self, datetime_to_str=False, custom_types=None):
        """
        Return a plain dict copy of this Row, recursing into nested Rows.

        :datetime_to_str: if True, date/datetime/time values are turned
            into 'YYYY-MM-DD HH:MM:SS' strings; otherwise kept as-is
        :custom_types: extra type or list of types to treat as serializable

        None values are kept; values of non-serializable types are dropped.
        NOTE(review): the order of the isinstance checks below appears
        deliberate (Row and Reference are converted before the generic
        fallback can delete them) -- do not reorder.
        """
        SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
        if isinstance(custom_types,(list,tuple,set)):
            SERIALIZABLE_TYPES += custom_types
        elif custom_types:
            SERIALIZABLE_TYPES.append(custom_types)
        d = dict(self)
        # iterate over a copy of the keys since entries may be deleted
        for k in copy.copy(d.keys()):
            v=d[k]
            if d[k] is None:
                continue
            elif isinstance(v,Row):
                d[k]=v.as_dict()
            elif isinstance(v,Reference):
                d[k]=long(v)
            elif isinstance(v,decimal.Decimal):
                d[k]=float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    d[k] = v.isoformat().replace('T',' ')[:19]
            elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
                del d[k]
        return d
7115
    def as_xml(self, row_name="row", colnames=None, indent='  '):
        """
        Serializes the row to an XML fragment, one element per key.
        Nested Rows become nested elements; callable values are skipped;
        keys that are not plain identifiers are emitted as
        <extra name="..."> elements.
        """
        # recursive worker: renders one value under the given tag name
        def f(row,field,indent='  '):
            if isinstance(row,Row):
                spc = indent+'  \n'
                items = [f(row[x],x,indent+'  ') for x in row]
                return '%s<%s>\n%s\n%s</%s>' % (
                    indent,
                    field,
                    spc.join(item for item in items if item),
                    indent,
                    field)
            elif not callable(row):
                if REGEX_ALPHANUMERIC.match(field):
                    return '%s<%s>%s</%s>' % (indent,field,row,field)
                else:
                    return '%s<extra name="%s">%s</extra>' % \
                        (indent,field,row)
            else:
                return None
        return f(self, row_name, indent=indent)
7136
7137 - def as_json(self, mode="object", default=None, colnames=None,
7138 serialize=True, **kwargs):
7139 """
7140 serializes the row to a JSON object
7141 kwargs are passed to .as_dict method
7142 only "object" mode supported
7143
7144 serialize = False used by Rows.as_json
7145 TODO: return array mode with query column order
7146
7147 mode and colnames are not implemented
7148 """
7149
7150 item = self.as_dict(**kwargs)
7151 if serialize:
7152 if have_serializers:
7153 return serializers.json(item,
7154 default=default or
7155 serializers.custom_json)
7156 elif simplejson:
7157 return simplejson.dumps(item)
7158 else:
7159 raise RuntimeError("missing simplejson")
7160 else:
7161 return item
7162
7172
7174 if not isinstance(fields,(list,tuple)):
7175 fields = [fields]
7176 new_fields = []
7177 for field in fields:
7178 if isinstance(field,Field):
7179 new_fields.append(field)
7180 elif isinstance(field,Table):
7181 for ofield in field:
7182 new_fields.append(ofield)
7183 else:
7184 raise RuntimeError("fields must be a list of fields")
7185 fields = new_fields
7186 field_map = {}
7187 for field in fields:
7188 n = field.name.lower()
7189 if not n in field_map:
7190 field_map[n] = field
7191 n = str(field).lower()
7192 if not n in field_map:
7193 field_map[n] = field
7194 constants = {}
7195 i = 0
7196 while True:
7197 m = REGEX_CONST_STRING.search(text)
7198 if not m: break
7199 text = text[:m.start()]+('#%i' % i)+text[m.end():]
7200 constants[str(i)] = m.group()[1:-1]
7201 i+=1
7202 text = re.sub('\s+',' ',text).lower()
7203 for a,b in [('&','and'),
7204 ('|','or'),
7205 ('~','not'),
7206 ('==','='),
7207 ('<','<'),
7208 ('>','>'),
7209 ('<=','<='),
7210 ('>=','>='),
7211 ('<>','!='),
7212 ('=<','<='),
7213 ('=>','>='),
7214 ('=','='),
7215 (' less or equal than ','<='),
7216 (' greater or equal than ','>='),
7217 (' equal or less than ','<='),
7218 (' equal or greater than ','>='),
7219 (' less or equal ','<='),
7220 (' greater or equal ','>='),
7221 (' equal or less ','<='),
7222 (' equal or greater ','>='),
7223 (' not equal to ','!='),
7224 (' not equal ','!='),
7225 (' equal to ','='),
7226 (' equal ','='),
7227 (' equals ','='),
7228 (' less than ','<'),
7229 (' greater than ','>'),
7230 (' starts with ','startswith'),
7231 (' ends with ','endswith'),
7232 (' not in ' , 'notbelongs'),
7233 (' in ' , 'belongs'),
7234 (' is ','=')]:
7235 if a[0]==' ':
7236 text = text.replace(' is'+a,' %s ' % b)
7237 text = text.replace(a,' %s ' % b)
7238 text = re.sub('\s+',' ',text).lower()
7239 text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
7240 query = field = neg = op = logic = None
7241 for item in text.split():
7242 if field is None:
7243 if item == 'not':
7244 neg = True
7245 elif not neg and not logic and item in ('and','or'):
7246 logic = item
7247 elif item in field_map:
7248 field = field_map[item]
7249 else:
7250 raise RuntimeError("Invalid syntax")
7251 elif not field is None and op is None:
7252 op = item
7253 elif not op is None:
7254 if item.startswith('#'):
7255 if not item[1:] in constants:
7256 raise RuntimeError("Invalid syntax")
7257 value = constants[item[1:]]
7258 else:
7259 value = item
7260 if field.type in ('text', 'string', 'json'):
7261 if op == '=': op = 'like'
7262 if op == '=': new_query = field==value
7263 elif op == '<': new_query = field<value
7264 elif op == '>': new_query = field>value
7265 elif op == '<=': new_query = field<=value
7266 elif op == '>=': new_query = field>=value
7267 elif op == '!=': new_query = field!=value
7268 elif op == 'belongs': new_query = field.belongs(value.split(','))
7269 elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
7270 elif field.type in ('text', 'string', 'json'):
7271 if op == 'contains': new_query = field.contains(value)
7272 elif op == 'like': new_query = field.like(value)
7273 elif op == 'startswith': new_query = field.startswith(value)
7274 elif op == 'endswith': new_query = field.endswith(value)
7275 else: raise RuntimeError("Invalid operation")
7276 elif field._db._adapter.dbengine=='google:datastore' and \
7277 field.type in ('list:integer', 'list:string', 'list:reference'):
7278 if op == 'contains': new_query = field.contains(value)
7279 else: raise RuntimeError("Invalid operation")
7280 else: raise RuntimeError("Invalid operation")
7281 if neg: new_query = ~new_query
7282 if query is None:
7283 query = new_query
7284 elif logic == 'and':
7285 query &= new_query
7286 elif logic == 'or':
7287 query |= new_query
7288 field = op = neg = logic = None
7289 return query
7290
7292
7293 """
7294 an instance of this class represents a database connection
7295
7296 Example::
7297
7298 db = DAL('sqlite://test.db')
7299
7300 or
7301
7302 db = DAL(**{"uri": ..., "tables": [...]...}) # experimental
7303
7304 db.define_table('tablename', Field('fieldname1'),
7305 Field('fieldname2'))
7306 """
7307
7308 - def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
7335
7336 @staticmethod
7338 """
7339 # ## this allows gluon to set a folder for this thread
7340 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
7341 """
7342 BaseAdapter.set_folder(folder)
7343
7344 @staticmethod
7346 """
7347 Returns a dictionary with uri as key with timings and defined tables
7348 {'sqlite://storage.sqlite': {
7349 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
7350 'dbtables': {
7351 'defined': ['auth_cas', 'auth_event', 'auth_group',
7352 'auth_membership', 'auth_permission', 'auth_user'],
7353 'lazy': '[]'
7354 }
7355 }
7356 }
7357 """
7358 dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
7359 infos = {}
7360 for db_uid, db_group in dbs:
7361 for db in db_group:
7362 if not db._uri:
7363 continue
7364 k = hide_password(db._uri)
7365 infos[k] = dict(dbstats = [(row[0], row[1]) for row in db._timings],
7366 dbtables = {'defined':
7367 sorted(list(set(db.tables) -
7368 set(db._LAZY_TABLES.keys()))),
7369 'lazy': sorted(db._LAZY_TABLES.keys())}
7370 )
7371 return infos
7372
7373 @staticmethod
7386
7387 @staticmethod
7409
    def __init__(self, uri=DEFAULT_URI,
                 pool_size=0, folder=None,
                 db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 migrate_enabled=True, fake_migrate_all=False,
                 decode_credentials=False, driver_args=None,
                 adapter_args=None, attempts=5, auto_import=False,
                 bigint_id=False, debug=False, lazy_tables=False,
                 db_uid=None, do_connect=True,
                 after_connection=None, tables=None):
        """
        Creates a new Database Abstraction Layer instance.

        Keyword arguments:

        :uri: string that contains information for connecting to a database.
               (default: 'sqlite://dummy.db')

                experimental: you can specify a dictionary as uri
                parameter i.e. with
                db = DAL({"uri": "sqlite://storage.sqlite",
                          "tables": {...}, ...})

                for an example of dict input you can check the output
                of the scaffolding db model with

                db.as_dict()

                Note that for compatibility with Python older than
                version 2.6.5 you should cast your dict input keys
                to str due to a syntax limitation on kwarg names.
                for proper DAL dictionary input you can use one of:

                obj = serializers.cast_keys(dict, [encoding="utf-8"])

                or else (for parsing json input)

                obj = serializers.loads_json(data, unicode_keys=False)

        :pool_size: How many open connections to make to the database object.
        :folder: where .table files will be created.
                 automatically set within web2py
                 use an explicit path when using DAL outside web2py
        :db_codec: string encoding of the database (default: 'UTF-8')
        :check_reserved: list of adapters to check tablenames and column names
                         against sql/nosql reserved keywords. (Default None)

        * 'common' List of sql keywords that are common to all database types
                such as "SELECT, INSERT". (recommended)
        * 'all' Checks against all known SQL keywords. (not recommended)
                <adaptername> Checks against the specific adapters list of keywords
                (recommended)
        * '<adaptername>_nonreserved' Checks against the specific adapters
                list of nonreserved keywords. (if available)
        :migrate (defaults to True) sets default migrate behavior for all tables
        :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
        :migrate_enabled (defaults to True). If set to False disables ALL migrations
        :fake_migrate_all (defaults to False). If sets to True fake migrates ALL tables
        :attempts (defaults to 5). Number of times to attempt connecting
        :auto_import (defaults to False). If set, import automatically table definitions from the
                 databases folder
        :bigint_id (defaults to False): If set, turn on bigint instead of int for id fields
        :lazy_tables (defaults to False): delay table definition until table access
        :after_connection (defaults to None): a callable that will be execute after the connection
        """
        # a '<zombie>' uri with a db_uid means this instance is being
        # rebuilt from an existing one: skip all initialization
        if uri == '<zombie>' and db_uid is not None: return
        if not decode_credentials:
            credential_decoder = lambda cred: cred
        else:
            credential_decoder = lambda cred: urllib.unquote(cred)
        self._folder = folder
        if folder:
            self.set_folder(folder)
        self._uri = uri
        self._pool_size = pool_size
        self._db_codec = db_codec
        self._lastsql = ''
        self._timings = []
        self._pending_references = {}
        self._request_tenant = 'request_tenant'
        self._common_fields = []
        self._referee_name = '%(table)s'
        self._bigint_id = bigint_id
        self._debug = debug
        self._migrated = []
        self._LAZY_TABLES = {}
        self._lazy_tables = lazy_tables
        self._tables = SQLCallableList()
        self._driver_args = driver_args
        self._adapter_args = adapter_args
        self._check_reserved = check_reserved
        self._decode_credentials = decode_credentials
        self._attempts = attempts
        self._do_connect = do_connect

        # a non-numeric or negative attempts value silently falls back to 5
        # (str(-1).isdigit() is False, so the second test is a safety net)
        if not str(attempts).isdigit() or attempts < 0:
            attempts = 5
        if uri:
            # several candidate uris may be given; try each, up to
            # `attempts` rounds, sleeping one second between rounds
            uris = isinstance(uri,(list,tuple)) and uri or [uri]
            error = ''
            connected = False
            for k in range(attempts):
                for uri in uris:
                    try:
                        # jdbc adapters are keyed with a 'jdbc:' prefix
                        if is_jdbc and not uri.startswith('jdbc:'):
                            uri = 'jdbc:'+uri
                        self._dbname = REGEX_DBNAME.match(uri).group()
                        if not self._dbname in ADAPTERS:
                            raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
                        # build the adapter for this uri; driver/adapter
                        # args default to fresh empty dicts
                        kwargs = dict(db=self,uri=uri,
                                      pool_size=pool_size,
                                      folder=folder,
                                      db_codec=db_codec,
                                      credential_decoder=credential_decoder,
                                      driver_args=driver_args or {},
                                      adapter_args=adapter_args or {},
                                      do_connect=do_connect,
                                      after_connection=after_connection)
                        self._adapter = ADAPTERS[self._dbname](**kwargs)
                        types = ADAPTERS[self._dbname].types
                        # copy the class-level types so per-instance tweaks
                        # (e.g. bigint ids) don't leak across instances
                        self._adapter.types = copy.copy(types)
                        self._adapter.build_parsemap()
                        if bigint_id:
                            if 'big-id' in types and 'reference' in types:
                                self._adapter.types['id'] = types['big-id']
                                self._adapter.types['reference'] = types['big-reference']
                        connected = True
                        break
                    except SyntaxError:
                        raise
                    except Exception:
                        tb = traceback.format_exc()
                        sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
                if connected:
                    break
                else:
                    time.sleep(1)
            if not connected:
                raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
        else:
            # no uri: run with a dummy BaseAdapter and disable migrations
            self._adapter = BaseAdapter(db=self,pool_size=0,
                                        uri='None',folder=folder,
                                        db_codec=db_codec, after_connection=after_connection)
            migrate = fake_migrate = False
        adapter = self._adapter
        self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
        self.check_reserved = check_reserved
        if self.check_reserved:
            from reserved_sql_keywords import ADAPTERS as RSK
            self.RSK = RSK
        self._migrate = migrate
        self._fake_migrate = fake_migrate
        self._migrate_enabled = migrate_enabled
        self._fake_migrate_all = fake_migrate_all
        if auto_import or tables:
            self.import_table_definitions(adapter.folder,
                                          tables=tables)
7570
7571 @property
7574
7577 pattern = pjoin(path,self._uri_hash+'_*.table')
7578 if tables:
7579 for table in tables:
7580 self.define_table(**table)
7581 else:
7582 for filename in glob.glob(pattern):
7583 tfile = self._adapter.file_open(filename, 'r')
7584 try:
7585 sql_fields = pickle.load(tfile)
7586 name = filename[len(pattern)-7:-6]
7587 mf = [(value['sortable'],
7588 Field(key,
7589 type=value['type'],
7590 length=value.get('length',None),
7591 notnull=value.get('notnull',False),
7592 unique=value.get('unique',False))) \
7593 for key, value in sql_fields.iteritems()]
7594 mf.sort(lambda a,b: cmp(a[0],b[0]))
7595 self.define_table(name,*[item[1] for item in mf],
7596 **dict(migrate=migrate,
7597 fake_migrate=fake_migrate))
7598 finally:
7599 self._adapter.file_close(tfile)
7600
7602 """
7603 Validates ``name`` against SQL keywords
7604 Uses self.check_reserve which is a list of
7605 operators to use.
7606 self.check_reserved
7607 ['common', 'postgres', 'mysql']
7608 self.check_reserved
7609 ['all']
7610 """
7611 for backend in self.check_reserved:
7612 if name.upper() in self.RSK[backend]:
7613 raise SyntaxError(
7614 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
7615
    def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
        """
        Maps a RESTful URL path (args) onto one of the given URL patterns
        and translates it into a database select; returns a Row with keys
        status, pattern, error, response (and count on a full match).

        EXAMPLE:

        db.define_table('person',Field('name'),Field('info'))
        db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))

        @request.restful()
        def index():
            def GET(*args,**vars):
                patterns = [
                    "/friends[person]",
                    "/{person.name}/:field",
                    "/{person.name}/pets[pet.ownedby]",
                    "/{person.name}/pets[pet.ownedby]/{pet.name}",
                    "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
                    ("/dogs[pet]", db.pet.info=='dog'),
                    ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
                    ]
                parser = db.parse_as_rest(patterns,args,vars)
                if parser.status == 200:
                    return dict(content=parser.response)
                else:
                    raise HTTP(parser.status,parser.error)

            def POST(table_name,**vars):
                if table_name == 'person':
                    return db.person.validate_and_insert(**vars)
                elif table_name == 'pet':
                    return db.pet.validate_and_insert(**vars)
                else:
                    raise HTTP(400)
            return locals()
        """

        db = self
        re1 = REGEX_SEARCH_PATTERN
        re2 = REGEX_SQUARE_BRACKETS

        # generate patterns for one table, one entry per readable field,
        # recursing into referencing tables while depth > 0
        def auto_table(table,base='',depth=0):
            patterns = []
            for field in db[table].fields:
                if base:
                    tag = '%s/%s' % (base,field.replace('_','-'))
                else:
                    tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
                f = db[table][field]
                if not f.readable: continue
                if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('boolean'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type in ('float','double','integer','bigint'):
                    # numeric fields are exposed as a [ge, lt) range
                    tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('list:'):
                    tag += '/{%s.%s.contains}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type in ('date','datetime'):
                    # drill down year -> month -> day (and below for time)
                    tag+= '/{%s.%s.year}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag+='/{%s.%s.month}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag+='/{%s.%s.day}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if f.type in ('datetime','time'):
                    tag+= '/{%s.%s.hour}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag+='/{%s.%s.minute}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag+='/{%s.%s.second}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if depth>0:
                    for f in db[table]._referenced_by:
                        tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
                        patterns.append(tag)
                        patterns += auto_table(table,base=tag,depth=depth-1)
            return patterns

        if patterns == 'auto':
            # build patterns for every non-auth table automatically
            patterns=[]
            for table in db.tables:
                if not table.startswith('auth_'):
                    patterns.append('/%s[%s]' % (table,table))
                    patterns += auto_table(table,base='',depth=1)
        else:
            # expand any trailing ':auto[...]' token in-place
            i = 0
            while i<len(patterns):
                pattern = patterns[i]
                if not isinstance(pattern,str):
                    pattern = pattern[0]
                tokens = pattern.split('/')
                if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
                    new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
                                              '/'.join(tokens[:-1]))
                    patterns = patterns[:i]+new_patterns+patterns[i+1:]
                    i += len(new_patterns)
                else:
                    i += 1
        if '/'.join(args) == 'patterns':
            # special URL '/patterns' lists all available patterns
            return Row({'status':200,'pattern':'list',
                        'error':None,'response':patterns})
        for pattern in patterns:
            basequery, exposedfields = None, []
            # a pattern may be (pattern, basequery[, exposedfields])
            if isinstance(pattern,tuple):
                if len(pattern)==2:
                    pattern, basequery = pattern
                elif len(pattern)>2:
                    pattern, basequery, exposedfields = pattern[0:3]
            otable=table=None
            if not isinstance(queries,dict):
                dbset=db(queries)
                if basequery is not None:
                    dbset = dbset(basequery)
            i=0
            tags = pattern[1:].split('/')
            # patterns must match the URL segment-for-segment
            if len(tags)!=len(args):
                continue
            for tag in tags:
                if re1.match(tag):
                    # '{table.field.op}' tag: filter the current set
                    tokens = tag[1:-1].split('.')
                    table, field = tokens[0], tokens[1]
                    if not otable or table == otable:
                        if len(tokens)==2 or tokens[2]=='eq':
                            query = db[table][field]==args[i]
                        elif tokens[2]=='ne':
                            query = db[table][field]!=args[i]
                        elif tokens[2]=='lt':
                            query = db[table][field]<args[i]
                        elif tokens[2]=='gt':
                            query = db[table][field]>args[i]
                        elif tokens[2]=='ge':
                            query = db[table][field]>=args[i]
                        elif tokens[2]=='le':
                            query = db[table][field]<=args[i]
                        elif tokens[2]=='year':
                            query = db[table][field].year()==args[i]
                        elif tokens[2]=='month':
                            query = db[table][field].month()==args[i]
                        elif tokens[2]=='day':
                            query = db[table][field].day()==args[i]
                        elif tokens[2]=='hour':
                            query = db[table][field].hour()==args[i]
                        elif tokens[2]=='minute':
                            query = db[table][field].minutes()==args[i]
                        elif tokens[2]=='second':
                            query = db[table][field].seconds()==args[i]
                        elif tokens[2]=='startswith':
                            query = db[table][field].startswith(args[i])
                        elif tokens[2]=='contains':
                            query = db[table][field].contains(args[i])
                        else:
                            raise RuntimeError("invalid pattern: %s" % pattern)
                        # a 4th token may only be 'not', negating the query
                        if len(tokens)==4 and tokens[3]=='not':
                            query = ~query
                        elif len(tokens)>=4:
                            raise RuntimeError("invalid pattern: %s" % pattern)
                        if not otable and isinstance(queries,dict):
                            dbset = db(queries[table])
                            if basequery is not None:
                                dbset = dbset(basequery)
                        dbset=dbset(query)
                    else:
                        raise RuntimeError("missing relation in pattern: %s" % pattern)
                elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
                    # 'name[table.field]' tag: hop to a related table
                    ref = tag[tag.find('[')+1:-1]
                    if '.' in ref and otable:
                        table,field = ref.split('.')
                        selfld = '_id'
                        # find the field of otable that the join is made on
                        if db[table][field].type.startswith('reference '):
                            refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
                        else:
                            refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
                        if refs:
                            selfld = refs[0]
                        if nested_select:
                            try:
                                dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
                            except ValueError:
                                return Row({'status':400,'pattern':pattern,
                                            'error':'invalid path','response':None})
                        else:
                            # adapters without subselect support: materialize ids
                            items = [item.id for item in dbset.select(db[otable][selfld])]
                            dbset=db(db[table][field].belongs(items))
                    else:
                        table = ref
                        if not otable and isinstance(queries,dict):
                            dbset = db(queries[table])
                        dbset=dbset(db[table])
                elif tag==':field' and table:
                    # ':field': return the values of one readable field
                    field = args[i]
                    if not field in db[table]: break
                    # unreadable fields are refused with a 418
                    if not db[table][field].readable:
                        return Row({'status':418,'pattern':pattern,
                                    'error':'I\'m a teapot','response':None})
                    try:
                        distinct = vars.get('distinct', False) == 'True'
                        offset = long(vars.get('offset',None) or 0)
                        limits = (offset,long(vars.get('limit',None) or 1000)+offset)
                    except ValueError:
                        return Row({'status':400,'error':'invalid limits','response':None})
                    items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
                    if items:
                        return Row({'status':200,'response':items,
                                    'pattern':pattern})
                    else:
                        return Row({'status':404,'pattern':pattern,
                                    'error':'no record found','response':None})
                elif tag != args[i]:
                    # constant segment mismatch: try the next pattern
                    break
                otable = table
                i += 1
                if i==len(tags) and table:
                    # whole pattern consumed: build the final select
                    ofields = vars.get('order',db[table]._id.name).split('|')
                    try:
                        orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
                    except (KeyError, AttributeError):
                        return Row({'status':400,'error':'invalid orderby','response':None})
                    if exposedfields:
                        fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
                    else:
                        fields = [field for field in db[table] if field.readable]
                    count = dbset.count()
                    try:
                        offset = long(vars.get('offset',None) or 0)
                        limits = (offset,long(vars.get('limit',None) or 1000)+offset)
                    except ValueError:
                        return Row({'status':400,'error':'invalid limits','response':None})
                    # refuse result sets larger than the requested window
                    if count > limits[1]-limits[0]:
                        return Row({'status':400,'error':'too many records','response':None})
                    try:
                        response = dbset.select(limitby=limits,orderby=orderby,*fields)
                    except ValueError:
                        return Row({'status':400,'pattern':pattern,
                                    'error':'invalid path','response':None})
                    return Row({'status':200,'response':response,
                                'pattern':pattern,'count':count})
        return Row({'status':400,'error':'no matching pattern','response':None})
7869
def define_table(self, tablename, *fields, **args):
    """
    Register a table called *tablename* with the given Field objects.

    Keyword arguments (``migrate``, ``redefine``, ``format``, ...) are
    validated against TABLE_ARGS.  When the DAL was created with
    ``lazy_tables=True`` the definition is only recorded and the actual
    Table object is built on first attribute access; otherwise the table
    is built immediately via ``lazy_define_table``.

    Returns the Table object, or None when the definition was deferred.
    Raises SyntaxError for bad names, duplicate definitions (unless
    ``redefine=True``) and unknown keyword arguments.
    """
    # allow fields to be passed as a keyword list as well
    if not fields and 'fields' in args:
        fields = args.get('fields', ())
    if not isinstance(tablename, str):
        # accept unicode names only when they encode cleanly to str
        if isinstance(tablename, unicode):
            try:
                tablename = str(tablename)
            except UnicodeEncodeError:
                raise SyntaxError("invalid unicode table name")
        else:
            raise SyntaxError("missing table name")
    elif hasattr(self, tablename) or tablename in self.tables:
        if not args.get('redefine', False):
            raise SyntaxError('table already defined: %s' % tablename)
    elif tablename.startswith('_') or hasattr(self, tablename) or \
            REGEX_PYTHON_KEYWORDS.match(tablename):
        raise SyntaxError('invalid table name: %s' % tablename)
    elif self.check_reserved:
        self.check_reserved_keyword(tablename)
    else:
        # NOTE: this validation only runs when check_reserved is off,
        # matching the historical elif-chain behavior
        invalid_args = set(args) - TABLE_ARGS
        if invalid_args:
            raise SyntaxError('invalid table "%s" attributes: %s' \
                % (tablename, invalid_args))
    if self._lazy_tables and tablename not in self._LAZY_TABLES:
        # defer construction: remember the recipe, build on first access
        self._LAZY_TABLES[tablename] = (tablename, fields, args)
        table = None
    else:
        table = self.lazy_define_table(tablename, *fields, **args)
    if tablename not in self.tables:
        self.tables.append(tablename)
    return table
7907
def lazy_define_table(self, tablename, *fields, **args):
    """
    Actually construct the Table object for *tablename*.

    Appends any DAL-wide common fields, instantiates the (possibly
    custom) table class, wires up references and default validators,
    and — when migrations are enabled or the engine is the Google
    datastore — asks the adapter to create/alter the physical table
    under the global lock.  Finally runs the optional ``on_define``
    callback and returns the Table.
    """
    args_get = args.get
    common_fields = self._common_fields
    if common_fields:
        fields = list(fields) + list(common_fields)

    table_class = args_get('table_class', Table)
    table = table_class(self, tablename, *fields, **args)
    table._actual = True
    self[tablename] = table

    table._create_references()
    # fields left with the DEFAULT sentinel get validators inferred
    # from their type
    for field in table:
        if field.requires == DEFAULT:
            field.requires = sqlhtml_validators(field)

    migrate = self._migrate_enabled and args_get('migrate', self._migrate)
    if migrate and self._uri not in (None, 'None') \
            or self._adapter.dbengine == 'google:datastore':
        fake_migrate = self._fake_migrate_all or \
            args_get('fake_migrate', self._fake_migrate)
        polymodel = args_get('polymodel', None)
        # FIX: acquire OUTSIDE the try block.  Previously acquire()
        # sat inside the try, so a failure while acquiring would run
        # the finally clause and release() a lock that was never held.
        GLOBAL_LOCKER.acquire()
        try:
            self._lastsql = self._adapter.create_table(
                table, migrate=migrate,
                fake_migrate=fake_migrate,
                polymodel=polymodel)
        finally:
            GLOBAL_LOCKER.release()
    else:
        table._dbt = None
    on_define = args_get('on_define', None)
    if on_define:
        on_define(table)
    return table
7948
def as_dict(self, flat=False, sanitize=True):
    """
    Serialize this DAL instance to a plain dictionary.

    The result carries a selection of the private ``_``-prefixed
    configuration attributes (None when absent), a ``tables`` list with
    each table serialized via ``Table.as_dict``, and — only when
    ``sanitize`` is False — the connection ``uri`` and ``db_uid``.
    """
    if sanitize:
        # never leak credentials unless explicitly asked
        uri = db_uid = None
    else:
        uri, db_uid = self._uri, self._db_uid
    attr_names = ('pool_size', 'folder', 'db_codec', 'check_reserved',
                  'migrate', 'fake_migrate', 'migrate_enabled',
                  'fake_migrate_all', 'decode_credentials',
                  'driver_args', 'adapter_args', 'attempts',
                  'bigint_id', 'debug', 'lazy_tables', 'do_connect')
    db_as_dict = dict((name, getattr(self, '_' + name, None))
                      for name in attr_names)
    db_as_dict.update(tables=[], uri=uri, db_uid=db_uid)
    for table in self:
        db_as_dict['tables'].append(table.as_dict(flat=flat,
                                                  sanitize=sanitize))
    return db_as_dict
7966
7967 - def as_xml(self, sanitize=True):
7972
7973 - def as_json(self, sanitize=True):
7978
7979 - def as_yaml(self, sanitize=True):
7984
7986 try:
7987 return tablename in self.tables
7988 except AttributeError:
7989
7990 return False
7991
7992 has_key = __contains__
7993
def get(self,key,default=None):
    """Return ``self.__dict__[key]`` if present, else *default*.

    Looks only at the instance ``__dict__`` and therefore never
    triggers ``__getattr__``, so this will not materialize a
    lazily-defined table the way attribute access does.
    """
    return self.__dict__.get(key,default)
7996
7998 for tablename in self.tables:
7999 yield self[tablename]
8000
8003
8005 if ogetattr(self,'_lazy_tables') and \
8006 key in ogetattr(self,'_LAZY_TABLES'):
8007 tablename, fields, args = self._LAZY_TABLES.pop(key)
8008 return self.lazy_define_table(tablename,*fields,**args)
8009 return ogetattr(self, key)
8010
8012 osetattr(self, str(key), value)
8013
8015 if key[:1]!='_' and key in self:
8016 raise SyntaxError(
8017 'Object %s exists and cannot be redefined' % key)
8018 osetattr(self,key,value)
8019
8020 __delitem__ = object.__delattr__
8021
8023 if hasattr(self,'_uri'):
8024 return '<DAL uri="%s">' % hide_password(str(self._uri))
8025 else:
8026 return '<DAL db_uid="%s">' % self._db_uid
8027
8030
def __call__(self, query=None, ignore_common_filters=None):
    """
    Build a Set (the DAL working set) from *query*.

    Accepts a Query, a bare Table ("all records of that table"), a bare
    Field ("field is not NULL"), or a dict whose optional
    ``ignore_common_filters`` entry overrides the keyword argument.
    """
    if isinstance(query, Table):
        query = self._adapter.id_query(query)
    elif isinstance(query, Field):
        # deliberately `!= None`: this builds a Query object,
        # it is not a boolean test
        query = (query != None)
    elif isinstance(query, dict):
        override = query.get("ignore_common_filters")
        if override:
            ignore_common_filters = override
    return Set(self, query, ignore_common_filters=ignore_common_filters)
8040
8043
8046
8048 self._adapter.close()
8049 if self._db_uid in THREAD_LOCAL.db_instances:
8050 db_group = THREAD_LOCAL.db_instances[self._db_uid]
8051 db_group.remove(self)
8052 if not db_group:
8053 del THREAD_LOCAL.db_instances[self._db_uid]
8054
def executesql(self, query, placeholders=None, as_dict=False,
               fields=None, colnames=None):
    """
    Execute an arbitrary raw-SQL *query* through the adapter.

    :param placeholders: optional sequence of values to substitute for
        placeholders in the SQL, or (if the DB driver supports it) a
        dict keyed by named placeholders.  Passed through to the driver
        only when truthy.
    :param as_dict: when True, convert the results cursor into a list
        of dictionaries keyed by the column names the driver reports in
        ``cursor.description`` (DB-API 2.0); same shape as applying
        ``.to_list()`` to a DAL select.
    :param fields: list of DAL Field objects (Table objects in the list
        are expanded to their fields; a single table may be passed
        un-listed) matching, in order, the columns of the result set.
        If given, results are parsed into a DAL Rows object via
        ``adapter.parse``.  May include Expression objects when
        *colnames* is also supplied.
    :param colnames: list of 'tablename.fieldname' labels for the
        result columns; derived from *fields* when not given.  The
        referenced tables may be dummy tables — they need not exist in
        the database, but must match the cursor's column order.
    :returns: list of dicts (``as_dict``), a Rows object
        (*fields*/*colnames*), the raw fetched rows, or None when
        nothing could be fetched (e.g. DDL statements).
    :raises RuntimeError: for ``as_dict=True`` on a driver whose cursor
        has no ``description``.
    """
    adapter = self._adapter
    if placeholders:
        adapter.execute(query, placeholders)
    else:
        adapter.execute(query)
    if as_dict:
        if not hasattr(adapter.cursor, 'description'):
            raise RuntimeError("database does not support executesql(...,as_dict=True)")
        # cursor.description entries are 7-tuples; item 0 is the name
        columns = adapter.cursor.description
        column_names = [c[0] for c in columns]
        data = adapter._fetchall()
        return [dict(zip(column_names, row)) for row in data]
    try:
        data = adapter._fetchall()
    except Exception:
        # FIX: was a bare `except:` — keep the best-effort contract of
        # returning None when there is no result set to fetch, but no
        # longer swallow SystemExit/KeyboardInterrupt.
        return None
    if fields or colnames:
        fields = [] if fields is None else fields
        if not isinstance(fields, list):
            fields = [fields]
        extracted_fields = []
        for field in fields:
            if isinstance(field, Table):
                # a whole Table contributes all of its fields
                extracted_fields.extend([f for f in field])
            else:
                extracted_fields.append(field)
        if not colnames:
            colnames = ['%s.%s' % (f.tablename, f.name)
                        for f in extracted_fields]
        data = adapter.parse(
            data, fields=extracted_fields, colnames=colnames)
    return data
8143
8145 for table in self:
8146 table._referenced_by = [field for field in table._referenced_by
8147 if not field.table==thistable]
8148
8150 step = long(kwargs.get('max_fetch_rows,',500))
8151 write_colnames = kwargs['write_colnames'] = \
8152 kwargs.get("write_colnames", True)
8153 for table in self.tables:
8154 ofile.write('TABLE %s\r\n' % table)
8155 query = self._adapter.id_query(self[table])
8156 nrows = self(query).count()
8157 kwargs['write_colnames'] = write_colnames
8158 for k in range(0,nrows,step):
8159 self(query).select(limitby=(k,k+step)).export_to_csv_file(
8160 ofile, *args, **kwargs)
8161 kwargs['write_colnames'] = False
8162 ofile.write('\r\n\r\n')
8163 ofile.write('END')
8164
def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
                         unique='uuid', map_tablenames=None,
                         ignore_missing_tables=False,
                         *args, **kwargs):
    """
    Restore a multi-table CSV dump (as written by export_to_csv_file).

    The stream alternates 'TABLE <name>' headers with per-table CSV
    sections separated by blank lines and ends with 'END'.  Each
    section is delegated to ``Table.import_from_csv_file``.
    *map_tablenames* renames tables on the fly (mapping a name to None
    skips it); *ignore_missing_tables* skips sections whose mapped
    table is not defined instead of raising.
    """
    # shared across tables so cross-table references stay consistent
    id_offset = {}
    mapping = map_tablenames or {}
    for line in ifile:
        line = line.strip()
        if not line:
            continue
        if line == 'END':
            return
        if not line.startswith('TABLE ') or line[6:] not in self.tables:
            raise SyntaxError('invalid file format')
        target = mapping.get(line[6:], line[6:])
        if target is not None and target in self.tables:
            self[target].import_from_csv_file(
                ifile, id_map, null, unique, id_offset,
                *args, **kwargs)
        elif target is None or ignore_missing_tables:
            # skip this table's rows up to the blank separator line
            for line in ifile:
                if not line.strip():
                    break
        else:
            raise RuntimeError("Unable to import table that does not exist.\nTry db.import_from_csv_file(..., map_tablenames={'table':'othertable'},ignore_missing_tables=True)")
8195
8198 return DAL('<zombie>',db_uid=db_uid)
8199
8202
8203 copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)
8206 """
8207 Helper class providing a comma-separated string having all the field names
8208 (prefixed by table name and '.')
8209
8210 normally only called from within gluon.sql
8211 """
8212
8215
8217 return ', '.join([str(field) for field in self._table])
8218
8221
8223 if not self._record:
8224 self._record = self._table[long(self)]
8225 if not self._record:
8226 raise RuntimeError(
8227 "Using a recursive select but encountered a broken reference: %s %d"%(self._table, long(self)))
8228
8230 if key == 'id':
8231 return long(self)
8232 if key in self._table:
8233 self.__allocate()
8234 if self._record:
8235 return self._record.get(key,None)
8236 else:
8237 return None
8238
8239 - def get(self, key, default=None):
8241
8248
8250 if key == 'id':
8251 return long(self)
8252 self.__allocate()
8253 return self._record.get(key, None)
8254
8256 self.__allocate()
8257 self._record[key] = value
8258
8261 return marshal.loads(data)
8262
8264 try:
8265 marshal_dump = marshal.dumps(long(data))
8266 except AttributeError:
8267 marshal_dump = 'i%s' % struct.pack('<i', long(data))
8268 return (Reference_unpickler, (marshal_dump,))
8269
8270 copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
8280 def _decorated(f):
8281 instance = self.table
8282 import types
8283 method = types.MethodType(f, instance, instance.__class__)
8284 name = method_name or f.func_name
8285 setattr(instance, name, method)
8286 return f
8287 return _decorated
8288
8290
8291 """
8292 an instance of this class represents a database table
8293
8294 Example::
8295
8296 db = DAL(...)
8297 db.define_table('users', Field('name'))
8298 db.users.insert(name='me') # print db.users._insert(...) to see SQL
8299 db.users.drop()
8300 """
8301
def __init__(
    self,
    db,
    tablename,
    *fields,
    **args
    ):
    """
    Initializes the table and performs checking on the provided fields.

    Each table will have automatically an 'id'.

    If a field is of type Table, the fields (excluding 'id') from that table
    will be used instead.

    :param db: owning DAL instance (may be None for detached tables)
    :param tablename: name of the table
    :param fields: Field / FieldVirtual / FieldMethod / dict / Table items
    :param args: table options (actual_name, sequence_name, trigger_name,
        common_filter, format, singular, plural, primarykey, ...)
    :raises SyntaxError: when a supplied field is of incorrect type.
    """
    self._actual = False  # becomes True once lazy_define_table runs
    self._tablename = tablename
    self._ot = args.get('actual_name')
    # adapter-specific names for the id sequence/trigger (Oracle, Firebird, ...)
    self._sequence_name = args.get('sequence_name') or \
        db and db._adapter.sequence_name(tablename)
    self._trigger_name = args.get('trigger_name') or \
        db and db._adapter.trigger_name(tablename)
    self._common_filter = args.get('common_filter')
    self._format = args.get('format')
    # human-readable names, derived from the table name unless given
    self._singular = args.get(
        'singular',tablename.replace('_',' ').capitalize())
    self._plural = args.get(
        'plural',pluralize(self._singular.lower()).capitalize())

    if 'primarykey' in args and args['primarykey'] is not None:
        self._primarykey = args.get('primarykey')

    # insert/update/delete callback hooks; delete_uploaded_files
    # presumably cleans up orphaned upload files — see Set
    self._before_insert = []
    self._before_update = [Set.delete_uploaded_files]
    self._before_delete = [Set.delete_uploaded_files]
    self._after_insert = []
    self._after_update = []
    self._after_delete = []

    self.add_method = MethodAdder(self)

    fieldnames,newfields=set(),[]
    _primarykey = getattr(self, '_primarykey', None)
    if _primarykey is not None:
        # keyed table: no automatic id; a single-field key becomes _id
        if not isinstance(_primarykey, list):
            raise SyntaxError(
                "primarykey must be a list of fields from table '%s'" \
                % tablename)
        if len(_primarykey)==1:
            self._id = [f for f in fields if isinstance(f,Field) \
                            and f.name==_primarykey[0]][0]
    elif not [f for f in fields if (isinstance(f,Field) and
              f.type=='id') or (isinstance(f, dict) and
              f.get("type", None)=="id")]:
        # no explicit id field supplied: create the automatic one
        field = Field('id', 'id')
        newfields.append(field)
        fieldnames.add('id')
        self._id = field
    virtual_fields = []
    def include_new(field):
        # accept a field into the table, tracking the id field
        newfields.append(field)
        fieldnames.add(field.name)
        if field.type=='id':
            self._id = field
    for field in fields:
        if isinstance(field, (FieldMethod, FieldVirtual)):
            virtual_fields.append(field)
        elif isinstance(field, Field) and not field.name in fieldnames:
            if field.db is not None:
                # field already belongs to another table: copy it
                field = copy.copy(field)
            include_new(field)
        elif isinstance(field, dict) and not field['fieldname'] in fieldnames:
            include_new(Field(**field))
        elif isinstance(field, Table):
            # borrow all non-id fields from another table definition
            table = field
            for field in table:
                if not field.name in fieldnames and not field.type=='id':
                    t2 = not table._actual and self._tablename
                    include_new(field.clone(point_self_references_to=t2))
        elif not isinstance(field, (Field, Table)):
            raise SyntaxError(
                'define_table argument is not a Field or Table: %s' % field)
    fields = newfields
    self._db = db
    tablename = tablename  # no-op, kept as-is
    self._fields = SQLCallableList()
    self.virtualfields = []
    fields = list(fields)

    if db and db._adapter.uploads_in_blob==True:
        # adapters that store uploads in the DB get a companion blob
        # field per upload field (named '<field>_blob' when implicit)
        uploadfields = [f.name for f in fields if f.type=='blob']
        for field in fields:
            fn = field.uploadfield
            if isinstance(field, Field) and field.type == 'upload'\
                    and fn is True:
                fn = field.uploadfield = '%s_blob' % field.name
            if isinstance(fn,str) and not fn in uploadfields:
                fields.append(Field(fn,'blob',default='',
                                    writable=False,readable=False))

    lower_fieldnames = set()
    reserved = dir(Table) + ['fields']
    if (db and db.check_reserved):
        check_reserved = db.check_reserved_keyword
    else:
        # fallback check: only forbid names that would shadow Table API
        def check_reserved(field_name):
            if field_name in reserved:
                raise SyntaxError("field name %s not allowed" % field_name)
    for field in fields:
        field_name = field.name
        check_reserved(field_name)
        # duplicates are detected case-insensitively
        fn_lower = field_name.lower()
        if fn_lower in lower_fieldnames:
            raise SyntaxError("duplicate field %s in table %s" \
                % (field_name, tablename))
        else:
            lower_fieldnames.add(fn_lower)

        self.fields.append(field_name)
        self[field_name] = field
        if field.type == 'id':
            self['id'] = field
        # bind the field to this table/db
        field.tablename = field._tablename = tablename
        field.table = field._table = self
        field.db = field._db = db
    self.ALL = SQLALL(self)

    if _primarykey is not None:
        for k in _primarykey:
            if k not in self.fields:
                # NOTE(review): the message below is missing a closing
                # quote before the %; kept byte-identical here
                raise SyntaxError(
                    "primarykey must be a list of fields from table '%s " % tablename)
            else:
                self[k].notnull = True
    for field in virtual_fields:
        self[field.name] = field
8440
8441 @property
8444
def update(self,*args,**kwargs):
    # Deliberately disabled: updating a whole Table without a query is
    # ambiguous — presumably callers should use db(query).update(...)
    # on a Set instead.
    raise RuntimeError("Syntax Not Supported")
8447
def _enable_record_versioning(self,
                              archive_db=None,
                              archive_name = '%(tablename)s_archive',
                              is_active = 'is_active',
                              current_record = 'current_record',
                              current_record_label = None):
    """
    Turn on record versioning for this table.

    Defines an archive table (in *archive_db*, defaulting to this
    table's db) that mirrors this table's fields plus a
    *current_record* reference back to the live record; every update
    then snapshots the pre-update row into the archive.  If the table
    has an *is_active* field, deletes become soft-deletes (set
    is_active=False) and a common filter hides inactive rows.

    :param archive_db: DAL holding the archive (cross-db allowed;
        references are then stored as plain bigints)
    :param archive_name: name template for the archive table
    :param is_active: field name used for soft-delete flagging
    :param current_record: archive field pointing at the live record
    :param current_record_label: label for that field
    """
    db = self._db
    archive_db = archive_db or db
    archive_name = archive_name % dict(tablename=self._tablename)
    if archive_name in archive_db.tables():
        return  # already enabled
    fieldnames = self.fields()
    same_db = archive_db is db
    # cross-db archives cannot hold real references; fall back to bigint
    field_type = self if same_db else 'bigint'
    clones = []
    for field in self:
        # nfk: "native foreign key" is only safe within the same db
        nfk = same_db or not field.type.startswith('reference')
        clones.append(field.clone(
            unique=False, type=field.type if nfk else 'bigint'))
    archive_db.define_table(
        archive_name,
        Field(current_record,field_type,label=current_record_label),
        *clones,**dict(format=self._format))

    # snapshot the old row on every update (defaults bind the current
    # archive_db/name/field at definition time, not call time)
    self._before_update.append(
        lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
            archive_record(qset,fs,db[an],cn))
    if is_active and is_active in fieldnames:
        # soft delete instead of physical delete
        self._before_delete.append(
            lambda qset: qset.update(is_active=False))
        # hide inactive rows of this table (and of aliases of it) from
        # every query that mentions it
        newquery = lambda query, t=self, name=self._tablename: \
            reduce(AND,[db[tn].is_active == True
                        for tn in db._adapter.tables(query)
                        if tn==name or getattr(db[tn],'_ot',None)==name])
        query = self._common_filter
        if query:
            # NOTE(review): this combines an existing common_filter
            # (a Query or callable) with the *callable* newquery via
            # `&` — confirm Query.__and__/the callable case behaves as
            # intended; kept as-is.
            newquery = query & newquery
        self._common_filter = newquery
8486
8494
8496 db = self._db
8497 pr = db._pending_references
8498 self._referenced_by = []
8499 self._references = []
8500 for field in self:
8501 fieldname = field.name
8502 field_type = field.type
8503 if isinstance(field_type,str) and field_type[:10] == 'reference ':
8504 ref = field_type[10:].strip()
8505 if not ref:
8506 SyntaxError('Table: reference to nothing: %s' %ref)
8507 if '.' in ref:
8508 rtablename, throw_it,rfieldname = ref.partition('.')
8509 else:
8510 rtablename, rfieldname = ref, None
8511 if not rtablename in db:
8512 pr[rtablename] = pr.get(rtablename,[]) + [field]
8513 continue
8514 rtable = db[rtablename]
8515 if rfieldname:
8516 if not hasattr(rtable,'_primarykey'):
8517 raise SyntaxError(
8518 'keyed tables can only reference other keyed tables (for now)')
8519 if rfieldname not in rtable.fields:
8520 raise SyntaxError(
8521 "invalid field '%s' for referenced table '%s' in table '%s'" \
8522 % (rfieldname, rtablename, self._tablename))
8523 rfield = rtable[rfieldname]
8524 else:
8525 rfield = rtable._id
8526 rtable._referenced_by.append(field)
8527 field.referent = rfield
8528 self._references.append(field)
8529 else:
8530 field.referent = None
8531 for referee in pr.get(self._tablename,[]):
8532 self._referenced_by.append(referee)
8533
8535 return dict([(k, v) for (k, v) in record.iteritems() if k
8536 in self.fields and (self[k].type!='id' or id)])
8537
8539 """ for keyed table only """
8540 query = None
8541 for k,v in key.iteritems():
8542 if k in self._primarykey:
8543 if query:
8544 query = query & (self[k] == v)
8545 else:
8546 query = (self[k] == v)
8547 else:
8548 raise SyntaxError(
8549 'Field %s is not part of the primary key of %s' % \
8550 (k,self._tablename))
8551 return query
8552
8554 if not key:
8555 return None
8556 elif isinstance(key, dict):
8557 """ for keyed table """
8558 query = self._build_query(key)
8559 return self._db(query).select(limitby=(0,1), orderby_on_limitby=False).first()
8560 elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
8561 return self._db(self._id == key).select(limitby=(0,1), orderby_on_limitby=False).first()
8562 elif key:
8563 return ogetattr(self, str(key))
8564
8566 for_update = kwargs.get('_for_update',False)
8567 if '_for_update' in kwargs: del kwargs['_for_update']
8568
8569 orderby = kwargs.get('_orderby',None)
8570 if '_orderby' in kwargs: del kwargs['_orderby']
8571
8572 if not key is DEFAULT:
8573 if isinstance(key, Query):
8574 record = self._db(key).select(
8575 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8576 elif not str(key).isdigit():
8577 record = None
8578 else:
8579 record = self._db(self._id == key).select(
8580 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8581 if record:
8582 for k,v in kwargs.iteritems():
8583 if record[k]!=v: return None
8584 return record
8585 elif kwargs:
8586 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
8587 return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8588 else:
8589 return None
8590
8592 if isinstance(key, dict) and isinstance(value, dict):
8593 """ option for keyed table """
8594 if set(key.keys()) == set(self._primarykey):
8595 value = self._filter_fields(value)
8596 kv = {}
8597 kv.update(value)
8598 kv.update(key)
8599 if not self.insert(**kv):
8600 query = self._build_query(key)
8601 self._db(query).update(**self._filter_fields(value))
8602 else:
8603 raise SyntaxError(
8604 'key must have all fields from primary key: %s'%\
8605 (self._primarykey))
8606 elif str(key).isdigit():
8607 if key == 0:
8608 self.insert(**self._filter_fields(value))
8609 elif self._db(self._id == key)\
8610 .update(**self._filter_fields(value)) is None:
8611 raise SyntaxError('No such record: %s' % key)
8612 else:
8613 if isinstance(key, dict):
8614 raise SyntaxError(
8615 'value must be a dictionary: %s' % value)
8616 osetattr(self, str(key), value)
8617
8618 __getattr__ = __getitem__
8619
8621 if key[:1]!='_' and key in self:
8622 raise SyntaxError('Object exists and cannot be redefined: %s' % key)
8623 osetattr(self,key,value)
8624
8626 if isinstance(key, dict):
8627 query = self._build_query(key)
8628 if not self._db(query).delete():
8629 raise SyntaxError('No such record: %s' % key)
8630 elif not str(key).isdigit() or \
8631 not self._db(self._id == key).delete():
8632 raise SyntaxError('No such record: %s' % key)
8633
8635 return hasattr(self,key)
8636
8637 has_key = __contains__
8638
8640 return self.__dict__.items()
8641
8643 for fieldname in self.fields:
8644 yield self[fieldname]
8645
8648
8649
8651 return '<Table %s (%s)>' % (self._tablename,','.join(self.fields()))
8652
8654 if self._ot is not None:
8655 ot = self._db._adapter.QUOTE_TEMPLATE % self._ot
8656 if 'Oracle' in str(type(self._db._adapter)):
8657 return '%s %s' % (ot, self._tablename)
8658 return '%s AS %s' % (ot, self._tablename)
8659 return self._tablename
8660
def _drop(self, mode = ''):
    # Delegate to the adapter's `_drop`; by this file's naming
    # convention the underscore variant presumably returns the SQL
    # rather than executing it (compare _truncate/truncate).
    return self._db._adapter._drop(self, mode)
8663
def drop(self, mode = ''):
    """Drop this table via the adapter; *mode* is passed through."""
    return self._db._adapter.drop(self,mode)
8666
def _listify(self, fields, update=False):
    """
    Normalize an insert/update *fields* dict into (Field, value) pairs.

    Validates every key against the table's fields, applies filter_in,
    fills in defaults (insert) or update-values (update), enforces
    required fields, and evaluates computed fields last.  Note that the
    caller's *fields* dict is mutated in place with the filled-in
    values.  Returns the pairs (``dict.values()``).
    """
    pairs = {}

    # pass 1: keep only values that map onto real table fields
    for name in fields:
        if name not in self.fields:
            if name != 'id':
                raise SyntaxError(
                    'Field %s does not belong to the table' % name)
            continue  # a spurious 'id' key is silently ignored
        fobj = self[name]
        value = fields[name]
        if fobj.filter_in:
            value = fobj.filter_in(value)
        pairs[name] = (fobj, value)

    # pass 2: defaults / update values / required checks for the rest
    to_compute = []
    for fobj in self:
        name = fobj.name
        if name in pairs:
            continue
        if fobj.compute:
            # computed fields are deferred until all values are known
            to_compute.append((name, fobj))
        elif not update and fobj.default is not None:
            fields[name] = fobj.default
            pairs[name] = (fobj, fobj.default)
        elif update and fobj.update is not None:
            fields[name] = fobj.update
            pairs[name] = (fobj, fobj.update)
        elif not update and fobj.required:
            raise RuntimeError(
                'Table: missing required field: %s' % name)

    # pass 3: evaluate computed fields against the completed row
    if to_compute:
        row = Row(fields)
        for name, fobj in to_compute:
            try:
                row[name] = computed = fobj.compute(row)
                pairs[name] = (fobj, computed)
            except (KeyError, AttributeError):
                # compute may legitimately fail on partial updates
                if fobj.required:
                    raise SyntaxError('unable to compute field: %s' % name)
    return pairs.values()
8718
8720 for field in self:
8721 if field.type=='upload' and field.name in fields:
8722 value = fields[field.name]
8723 if value is not None and not isinstance(value,str):
8724 if hasattr(value,'file') and hasattr(value,'filename'):
8725 new_name = field.store(value.file,filename=value.filename)
8726 elif hasattr(value,'read') and hasattr(value,'name'):
8727 new_name = field.store(value,filename=value.name)
8728 else:
8729 raise RuntimeError("Unable to handle upload")
8730 fields[field.name] = new_name
8731
8733 "If there are no fields/values specified, return table defaults"
8734 if not fields:
8735 fields = {}
8736 for field in self:
8737 if field.type != "id":
8738 fields[field.name] = field.default
8739 return fields
8740
8744
8754
8770
8796
8798 if _key is DEFAULT:
8799 record = self(**values)
8800 elif isinstance(_key,dict):
8801 record = self(**_key)
8802 else:
8803 record = self(_key)
8804 if record:
8805 record.update_record(**values)
8806 newid = None
8807 else:
8808 newid = self.insert(**values)
8809 return newid
8810
8812 """
8813 here items is a list of dictionaries
8814 """
8815 items = [self._listify(item) for item in items]
8816 if any(f(item) for item in items for f in self._before_insert):return 0
8817 ret = self._db._adapter.bulk_insert(self,items)
8818 ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
8819 return ret
8820
8822 return self._db._adapter._truncate(self, mode)
8823
8825 return self._db._adapter.truncate(self, mode)
8826
def import_from_csv_file(
    self,
    csvfile,
    id_map=None,
    null='<NULL>',
    unique='uuid',
    id_offset=None,
    *args, **kwargs
    ):
    """
    Import records from csv file.
    Column headers must have same names as table fields.
    Field 'id' is ignored.
    If column names read 'table.file' the 'table.' prefix is ignored.
    'unique' argument is a field which must be unique
    (typically a uuid field)
    'restore' argument is default False;
    if set True will remove old values in table first.
    'id_map' if set to None will not map ids.
    The import will keep the id numbers in the restored table.
    This assumes that there is an field of type id that
    is integer and in incrementing order.
    Will keep the id numbers in restored table.
    """
    delimiter = kwargs.get('delimiter', ',')
    quotechar = kwargs.get('quotechar', '"')
    quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
    restore = kwargs.get('restore', False)
    if restore:
        # wipe existing rows before re-importing
        self._db[self].truncate()

    reader = csv.reader(csvfile, delimiter=delimiter,
                        quotechar=quotechar, quoting=quoting)
    colnames = None
    if isinstance(id_map, dict):
        # per-table map: old csv id -> new db id
        if not self._tablename in id_map:
            id_map[self._tablename] = {}
        id_map_self = id_map[self._tablename]

    def fix(field, value, id_map, id_offset):
        # Convert one csv cell to the Python value for *field*,
        # remapping reference ids through id_map/id_offset.
        list_reference_s='list:reference'
        if value == null:
            value = None
        elif field.type=='blob':
            value = base64.b64decode(value)
        elif field.type=='double' or field.type=='float':
            if not value.strip():
                value = None
            else:
                value = float(value)
        elif field.type in ('integer','bigint'):
            if not value.strip():
                value = None
            else:
                value = long(value)
        elif field.type.startswith('list:string'):
            value = bar_decode_string(value)
        elif field.type.startswith(list_reference_s):
            ref_table = field.type[len(list_reference_s):].strip()
            if id_map is not None:
                value = [id_map[ref_table][long(v)] \
                         for v in bar_decode_string(value)]
            else:
                value = [v for v in bar_decode_string(value)]
        elif field.type.startswith('list:'):
            value = bar_decode_integer(value)
        elif id_map and field.type.startswith('reference'):
            # 9 == len('reference'); unknown ids pass through unchanged
            try:
                value = id_map[field.type[9:].strip()][long(value)]
            except KeyError:
                pass
        elif id_offset and field.type.startswith('reference'):
            try:
                value = id_offset[field.type[9:].strip()]+long(value)
            except KeyError:
                pass
        return (field.name, value)

    def is_id(colname):
        # True when the csv column maps onto this table's id field
        if colname in self:
            return self[colname].type == 'id'
        else:
            return False

    first = True
    unique_idx = None
    for lineno, line in enumerate(reader):
        if not line:
            break  # blank line ends this table's section
        if not colnames:
            # first row is the header; strip any 'table.' prefixes
            colnames = [x.split('.',1)[-1] for x in line][:len(line)]
            cols, cid = [], None
            for i,colname in enumerate(colnames):
                if is_id(colname):
                    cid = i
                elif colname in self.fields:
                    cols.append((i,self[colname]))
                if colname == unique:
                    unique_idx = i
        else:
            items = []
            for i, field in cols:
                try:
                    items.append(fix(field, line[i], id_map, id_offset))
                except ValueError:
                    raise RuntimeError("Unable to parse line:%s field:%s value:'%s'"
                                       % (lineno+1,field,line[i]))

            if not (id_map or cid is None or id_offset is None or unique_idx):
                # id-preserving mode: keep the csv ids by inserting
                # filler rows (immediately deleted) until the
                # autoincrement catches up with csv_id + offset
                csv_id = long(line[cid])
                curr_id = self.insert(**dict(items))
                if first:
                    first = False
                    # establish this table's id offset from the first row
                    id_offset[self._tablename] = (curr_id-csv_id) \
                        if curr_id>csv_id else 0
                while curr_id<csv_id+id_offset[self._tablename]:
                    self._db(self._db[self][colnames[cid]] == curr_id).delete()
                    curr_id = self.insert(**dict(items))
            elif not unique_idx:
                new_id = self.insert(**dict(items))
            else:
                # upsert keyed on the 'unique' column
                unique_value = line[unique_idx]
                query = self._db[self][unique] == unique_value
                record = self._db(query).select().first()
                if record:
                    record.update_record(**dict(items))
                    new_id = record[self._id.name]
                else:
                    new_id = self.insert(**dict(items))
            if id_map and cid is not None:
                id_map_self[long(line[cid])] = new_id
8967
8968 - def as_dict(self, flat=False, sanitize=True):
8980
8981 - def as_xml(self, sanitize=True):
8986
8987 - def as_json(self, sanitize=True):
8992
8993 - def as_yaml(self, sanitize=True):
8998
9001
def on(self, query):
    """Return an ``ON <query>`` Expression for this table — typically
    used as a join condition in a select (e.g. via the ``left=``
    argument)."""
    return Expression(self._db,self._db._adapter.ON,self,query)
9004
9006 tablenames = qset.db._adapter.tables(qset.query)
9007 if len(tablenames)!=1: raise RuntimeError("cannot update join")
9008 table = qset.db[tablenames[0]]
9009 for row in qset.select():
9010 fields = archive_table._filter_fields(row)
9011 fields[current_record] = row.id
9012 archive_table.insert(**fields)
9013 return False
9014
9018
def __init__(self, db, op, first=None, second=None, type=None,
             **optional_args):
    """
    A node of a SQL expression tree: *op* applied to operands *first*
    and *second*.  *type* is the SQL type of the result; when omitted
    it is inherited from the left operand (if it carries one).
    """
    self.db = db
    self.op = op
    self.first = first
    self.second = second
    # remember the table of the left operand, when it has one
    self._table = getattr(first, '_table', None)

    # inherit the type from `first` unless explicitly provided
    if type or not first or not hasattr(first, 'type'):
        self.type = type
    else:
        self.type = first.type
    self.optional_args = optional_args
9040
9044
9048
9052
9056
9060
9064
9068
9072
9076
9080
9084
9088
9092
9096
9100
9104
9108
9112
9114 db = self.db
9115 if start < 0:
9116 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
9117 else:
9118 pos0 = start + 1
9119
9120 if stop < 0:
9121 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
9122 elif stop == sys.maxint:
9123 length = self.len()
9124 else:
9125 length = '(%s - %s)' % (stop + 1, pos0)
9126 return Expression(db,db._adapter.SUBSTRING,
9127 self, (pos0, length), self.type)
9128
9130 return self[i:i + 1]
9131
9133 return self.db._adapter.expand(self,self.type)
9134
9136 db = self.db
9137 return Expression(db,db._adapter.COMMA,self,other,self.type)
9138
9144
9148
9150 db = self.db
9151 if self.type in ('integer','bigint'):
9152 result_type = 'integer'
9153 elif self.type in ['date','time','datetime','double','float']:
9154 result_type = 'double'
9155 elif self.type.startswith('decimal('):
9156 result_type = self.type
9157 else:
9158 raise SyntaxError("subtraction operation not supported for type")
9159 return Expression(db,db._adapter.SUB,self,other,result_type)
9160
9164
9168
9172
9176
9180
9184
9188
9192
9196
9197 - def like(self, value, case_sensitive=False):
9198 db = self.db
9199 op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
9200 return Query(db, op, self, value)
9201
9205
    def belongs(self, *value, **kwattr):
        """
        Accepts the following inputs:
           field.belongs(1,2)
           field.belongs((1,2))
           field.belongs(query)

        Does NOT accept:
           field.belongs(1)
        """
        db = self.db
        if len(value) == 1:
            # a single argument: unwrap it (tuple/list/set/Query/subselect)
            value = value[0]
            if isinstance(value,Query):
                # a Query becomes a nested subselect on the queried table's id
                value = db(value)._select(value.first._table._id)
            elif not isinstance(value, basestring):
                value = set(value)
                if kwattr.get('null') and None in value:
                    # None cannot be matched by IN(...): strip it and OR with IS NULL
                    value.remove(None)
                    return (self == None) | Query(db, db._adapter.BELONGS, self, value)
        return Query(db, db._adapter.BELONGS, self, value)
9227
9229 db = self.db
9230 if not self.type in ('string', 'text', 'json', 'upload'):
9231 raise SyntaxError("startswith used with incompatible field type")
9232 return Query(db, db._adapter.STARTSWITH, self, value)
9233
9235 db = self.db
9236 if not self.type in ('string', 'text', 'json', 'upload'):
9237 raise SyntaxError("endswith used with incompatible field type")
9238 return Query(db, db._adapter.ENDSWITH, self, value)
9239
9240 - def contains(self, value, all=False, case_sensitive=False):
9241 """
9242 The case_sensitive parameters is only useful for PostgreSQL
9243 For other RDMBs it is ignored and contains is always case in-sensitive
9244 For MongoDB and GAE contains is always case sensitive
9245 """
9246 db = self.db
9247 if isinstance(value,(list, tuple)):
9248 subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
9249 for v in value if str(v).strip()]
9250 if not subqueries:
9251 return self.contains('')
9252 else:
9253 return reduce(all and AND or OR,subqueries)
9254 if not self.type in ('string', 'text', 'json', 'upload') and not self.type.startswith('list:'):
9255 raise SyntaxError("contains used with incompatible field type")
9256 return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)
9257
9261
9262
9263
9264 - def st_asgeojson(self, precision=15, options=0, version=1):
9268
9269 - def st_astext(self):
9270 db = self.db
9271 return Expression(db, db._adapter.ST_ASTEXT, self, type='string')
9272
9276
9280
9284
9288
9289
9290
9294
9298
9302
9306
9310
9314
9319 """
9320 allows defining of custom SQL types
9321
9322 Example::
9323
9324 decimal = SQLCustomType(
9325 type ='double',
9326 native ='integer',
9327 encoder =(lambda x: int(float(x) * 100)),
9328 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
9329 )
9330
9331 db.define_table(
9332 'example',
9333 Field('value', type=decimal)
9334 )
9335
9336 :param type: the web2py type (default = 'string')
9337 :param native: the backend type
9338 :param encoder: how to encode the value to store it in the backend
9339 :param decoder: how to decode the value retrieved from the backend
9340 :param validator: what validators to use ( default = None, will use the
9341 default validator for type)
9342 """
9343
9344 - def __init__(
9345 self,
9346 type='string',
9347 native=None,
9348 encoder=None,
9349 decoder=None,
9350 validator=None,
9351 _class=None,
9352 ):
9353
9354 self.type = type
9355 self.native = native
9356 self.encoder = encoder or (lambda x: x)
9357 self.decoder = decoder or (lambda x: x)
9358 self.validator = validator
9359 self._class = _class or type
9360
9362 try:
9363 return self.type.startswith(self, text)
9364 except TypeError:
9365 return False
9366
9369
9372
9375
9377 - def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
9392 return '%s.%s' % (self.tablename, self.name)
9393
9395 - def __init__(self, name, f=None, handler=None):
9399
9401 return ', '.join(str(y) for y in x or [])
9402
9403 -class Field(Expression):
9404
9405 Virtual = FieldVirtual
9406 Method = FieldMethod
9407 Lazy = FieldMethod
9408
9409 """
9410 an instance of this class represents a database field
9411
9412 example::
9413
9414 a = Field(name, 'string', length=32, default=None, required=False,
9415 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
9416 notnull=False, unique=False,
9417 uploadfield=True, widget=None, label=None, comment=None,
9418 uploadfield=True, # True means store on disk,
9419 # 'a_field_name' means store in this field in db
9420 # False means file content will be discarded.
9421 writable=True, readable=True, update=None, authorize=None,
9422 autodelete=False, represent=None, uploadfolder=None,
9423 uploadseparate=False # upload to separate directories by uuid_keys
9424 # first 2 character and tablename.fieldname
9425 # False - old behavior
9426 # True - put uploaded file in
9427 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
9428 # directory)
9429 uploadfs=None # a pyfilesystem where to store upload
9430
9431 to be used as argument of DAL.define_table
9432
9433 allowed field types:
9434 string, boolean, integer, double, text, blob,
9435 date, time, datetime, upload, password
9436
9437 """
9438
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        uploadfs=None,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        custom_retrieve_file_properties=None,
        custom_delete=None,
        filter_in = None,
        filter_out = None,
        custom_qualifier = None,
        map_none = None,
        ):
        """
        Define a database field (see the class docstring for the full
        description of every keyword argument).

        Raises SyntaxError on an invalid field name (non-str, a Table
        attribute name, a leading underscore, or a Python keyword).
        """
        # a Field is also an Expression; it starts unbound from any db/op
        self._db = self.db = None
        self.op = None
        self.first = None
        self.second = None
        if isinstance(fieldname, unicode):
            try:
                fieldname = str(fieldname)
            except UnicodeEncodeError:
                raise SyntaxError('Field: invalid unicode field name')
        self.name = fieldname = cleanup(fieldname)
        if not isinstance(fieldname, str) or hasattr(Table, fieldname) or \
                fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
            raise SyntaxError('Field: invalid field name: %s' % fieldname)
        # a Table or Field passed as the type means "reference <that table>"
        self.type = type if not isinstance(type, (Table,Field)) else 'reference %s' % type
        self.length = length if not length is None else DEFAULTLENGTH.get(self.type,512)
        # an unspecified default falls back to the update value (or None)
        self.default = default if default!=DEFAULT else (update or None)
        self.required = required
        self.ondelete = ondelete.upper()
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.uploadfs = uploadfs
        self.widget = widget
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        # list: types get a comma-joining default representation
        self.represent = list_represent if \
            represent==None and type in ('list:integer','list:string') else represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        self.custom_retrieve_file_properties = custom_retrieve_file_properties
        self.custom_delete = custom_delete
        self.filter_in = filter_in
        self.filter_out = filter_out
        self.custom_qualifier = custom_qualifier
        # default label: 'my_field' -> 'My Field'
        self.label = label if label!=None else fieldname.replace('_',' ').title()
        self.requires = requires if requires!=None else []
        self.map_none = map_none
9518
9520 self.__dict__.update(*args,**attributes)
9521
9522 - def clone(self,point_self_references_to=False,**args):
9523 field = copy.copy(self)
9524 if point_self_references_to and \
9525 field.type == 'reference %s'+field._tablename:
9526 field.type = 'reference %s' % point_self_references_to
9527 field.__dict__.update(args)
9528 return field
9529
    def store(self, file, filename=None, path=None):
        """
        Store an uploaded file and return the generated safe filename.

        The new name encodes table, field, a uuid key and the base16-encoded
        original filename:  <table>.<field>.<uuid16>.<b16name>.<ext>
        Depending on self.uploadfield the content goes to a blob field, a
        pyfilesystem (uploadfs), or a directory on disk.
        """
        if self.custom_store:
            # user-supplied storage hook takes over completely
            return self.custom_store(file,filename,path)
        if isinstance(file, cgi.FieldStorage):
            filename = filename or file.filename
            file = file.file
        elif not filename:
            filename = file.name
        # normalize path separators and strip any directory components
        filename = os.path.basename(filename.replace('/', os.sep)\
                                        .replace('\\', os.sep))
        m = REGEX_STORE_PATTERN.search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        # truncate so name + '.' + extension fits the field length
        newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
        self_uploadfield = self.uploadfield
        if isinstance(self_uploadfield,Field):
            # uploadfield is another Field: store content in that blob field
            blob_uploadfield_name = self_uploadfield.uploadfield
            keys={self_uploadfield.name: newfilename,
                  blob_uploadfield_name: file.read()}
            self_uploadfield.table.insert(**keys)
        elif self_uploadfield == True:
            # store on disk (or uploadfs); resolve the target folder
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError(
                    "you must specify a Field(...,uploadfolder=...)")
            if self.uploadseparate:
                if self.uploadfs:
                    raise RuntimeError("not supported")
                # shard uploads into <table>.<field>/<first 2 uuid chars>/
                path = pjoin(path,"%s.%s" %(self._tablename, self.name),
                             uuid_key[:2])
            if not exists(path):
                os.makedirs(path)
            pathfilename = pjoin(path, newfilename)
            if self.uploadfs:
                dest_file = self.uploadfs.open(newfilename, 'wb')
            else:
                dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            except IOError:
                raise IOError(
                    'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename)
            dest_file.close()
        return newfilename
9582
    def retrieve(self, name, path=None, nameonly=False):
        """
        if nameonly==True return (filename, fullfilename) instead of
        (filename, stream)
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve:
            # user-supplied retrieval hook takes over completely
            return self.custom_retrieve(name, path)
        import http
        if self.authorize or isinstance(self_uploadfield, str):
            # need the row: either to authorize access or to read the blob column
            row = self.db(self == name).select().first()
            if not row:
                raise http.HTTP(404)
            if self.authorize and not self.authorize(row):
                raise http.HTTP(403)
        file_properties = self.retrieve_file_properties(name,path)
        filename = file_properties['filename']
        if isinstance(self_uploadfield, str):
            # content lives in another column of the same row
            stream = StringIO.StringIO(row[self_uploadfield] or '')
        elif isinstance(self_uploadfield,Field):
            # content lives in a blob field of a separate table
            blob_uploadfield_name = self_uploadfield.uploadfield
            query = self_uploadfield == name
            data = self_uploadfield.table(query)[blob_uploadfield_name]
            stream = StringIO.StringIO(data)
        elif self.uploadfs:
            # content lives in a pyfilesystem
            stream = self.uploadfs.open(name, 'rb')
        else:
            # content lives on disk under the resolved upload path
            fullname = pjoin(file_properties['path'],name)
            if nameonly:
                return (filename, fullname)
            stream = open(fullname,'rb')
        return (filename, stream)
9619
9621 m = REGEX_UPLOAD_PATTERN.match(name)
9622 if not m or not self.isattachment:
9623 raise TypeError('Can\'t retrieve %s file properties' % name)
9624 self_uploadfield = self.uploadfield
9625 if self.custom_retrieve_file_properties:
9626 return self.custom_retrieve_file_properties(name, path)
9627 if m.group('name'):
9628 try:
9629 filename = base64.b16decode(m.group('name'), True)
9630 filename = REGEX_CLEANUP_FN.sub('_', filename)
9631 except (TypeError, AttributeError):
9632 filename = name
9633 else:
9634 filename = name
9635
9636 if isinstance(self_uploadfield, (str, Field)):
9637 return dict(path=None,filename=filename)
9638
9639 if not path:
9640 if self.uploadfolder:
9641 path = self.uploadfolder
9642 else:
9643 path = pjoin(self.db._adapter.folder, '..', 'uploads')
9644 if self.uploadseparate:
9645 t = m.group('table')
9646 f = m.group('field')
9647 u = m.group('uuidkey')
9648 path = pjoin(path,"%s.%s" % (t,f),u[:2])
9649 return dict(path=path,filename=filename)
9650
9651
9667
9679
9680 - def count(self, distinct=None):
9682
9683 - def as_dict(self, flat=False, sanitize=True):
9684 attrs = ("name", 'authorize', 'represent', 'ondelete',
9685 'custom_store', 'autodelete', 'custom_retrieve',
9686 'filter_out', 'uploadseparate', 'widget', 'uploadfs',
9687 'update', 'custom_delete', 'uploadfield', 'uploadfolder',
9688 'custom_qualifier', 'unique', 'writable', 'compute',
9689 'map_none', 'default', 'type', 'required', 'readable',
9690 'requires', 'comment', 'label', 'length', 'notnull',
9691 'custom_retrieve_file_properties', 'filter_in')
9692 serializable = (int, long, basestring, float, tuple,
9693 bool, type(None))
9694
9695 def flatten(obj):
9696 if isinstance(obj, dict):
9697 return dict((flatten(k), flatten(v)) for k, v in
9698 obj.items())
9699 elif isinstance(obj, (tuple, list, set)):
9700 return [flatten(v) for v in obj]
9701 elif isinstance(obj, serializable):
9702 return obj
9703 elif isinstance(obj, (datetime.datetime,
9704 datetime.date, datetime.time)):
9705 return str(obj)
9706 else:
9707 return None
9708
9709 d = dict()
9710 if not (sanitize and not (self.readable or self.writable)):
9711 for attr in attrs:
9712 if flat:
9713 d.update({attr: flatten(getattr(self, attr))})
9714 else:
9715 d.update({attr: getattr(self, attr)})
9716 d["fieldname"] = d.pop("name")
9717 return d
9718
9719 - def as_xml(self, sanitize=True):
9726
9727 - def as_json(self, sanitize=True):
9734
9735 - def as_yaml(self, sanitize=True):
9741
9744
9746 try:
9747 return '%s.%s' % (self.tablename, self.name)
9748 except:
9749 return '<no table>.%s' % self.name
9750
9751
9752 -class Query(object):
9753
9754 """
9755 a query object necessary to define a set.
9756 it can be stored or can be passed to DAL.__call__() to obtain a Set
9757
9758 Example::
9759
9760 query = db.users.name=='Max'
9761 set = db(query)
9762 records = set.select()
9763
9764 """
9765
9766 - def __init__(
9767 self,
9768 db,
9769 op,
9770 first=None,
9771 second=None,
9772 ignore_common_filters = False,
9773 **optional_args
9774 ):
9775 self.db = self._db = db
9776 self.op = op
9777 self.first = first
9778 self.second = second
9779 self.ignore_common_filters = ignore_common_filters
9780 self.optional_args = optional_args
9781
9784
9786 return self.db._adapter.expand(self)
9787
9790
9791 __rand__ = __and__
9792
9794 return Query(self.db,self.db._adapter.OR,self,other)
9795
9796 __ror__ = __or__
9797
9799 if self.op==self.db._adapter.NOT:
9800 return self.first
9801 return Query(self.db,self.db._adapter.NOT,self)
9802
9804 return repr(self) == repr(other)
9805
9807 return not (self == other)
9808
    def case(self,t=1,f=0):
        """Return a CASE expression: t when this query holds, f otherwise."""
        return self.db._adapter.CASE(self,t,f)
9811
    def as_dict(self, flat=False, sanitize=True):
        """Experimental stuff

        This allows to return a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
                     "second":0}
        """

        SERIALIZABLE_TYPES = (tuple, dict, set, list, int, long, float,
                              basestring, type(None), bool)
        def loop(d):
            # recursively convert a Query/Expression __dict__ into plain data
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    # operands: nested Query/Expression, a Field, a plain
                    # serializable value, or a date/time rendered as text
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    # operator: adapter method name, or an already-plain string
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else: pass # not callable or string
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else: newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else: return self.__dict__
9861
9862
9863 - def as_xml(self, sanitize=True):
9870
9871 - def as_json(self, sanitize=True):
9878
9880 if not orderby:
9881 return None
9882 orderby2 = orderby[0]
9883 for item in orderby[1:]:
9884 orderby2 = orderby2 | item
9885 return orderby2
9886
9888 return (query and hasattr(query,'ignore_common_filters') and \
9889 not query.ignore_common_filters)
9890
9892
9893 """
9894 a Set represents a set of records in the database,
9895 the records are identified by the query=Query(...) object.
9896 normally the Set is generated by DAL.__call__(Query(...))
9897
9898 given a set, for example
9899 set = db(db.users.name=='Max')
9900 you can:
9901 set.update(db.users.name='Massimo')
9902 set.delete() # all elements in the set
9903 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
9904 and take subsets:
9905 subset = set(db.users.id<5)
9906 """
9907
9908 - def __init__(self, db, query, ignore_common_filters = None):
9909 self.db = db
9910 self._db = db
9911 self.dquery = None
9912
9913
9914 if isinstance(query, dict):
9915 query = self.parse(query)
9916
9917 if not ignore_common_filters is None and \
9918 use_common_filters(query) == ignore_common_filters:
9919 query = copy.copy(query)
9920 query.ignore_common_filters = ignore_common_filters
9921 self.query = query
9922
9925
9926 - def __call__(self, query, ignore_common_filters=False):
9927 if query is None:
9928 return self
9929 elif isinstance(query,Table):
9930 query = self.db._adapter.id_query(query)
9931 elif isinstance(query,str):
9932 query = Expression(self.db,query)
9933 elif isinstance(query,Field):
9934 query = query!=None
9935 if self.query:
9936 return Set(self.db, self.query & query,
9937 ignore_common_filters=ignore_common_filters)
9938 else:
9939 return Set(self.db, query,
9940 ignore_common_filters=ignore_common_filters)
9941
    def _count(self,distinct=None):
        """Return the SQL that count() would execute, without running it."""
        return self.db._adapter._count(self.query,distinct)
9944
9945 - def _select(self, *fields, **attributes):
9946 adapter = self.db._adapter
9947 tablenames = adapter.tables(self.query,
9948 attributes.get('join',None),
9949 attributes.get('left',None),
9950 attributes.get('orderby',None),
9951 attributes.get('groupby',None))
9952 fields = adapter.expand_all(fields, tablenames)
9953 return adapter._select(self.query,fields,attributes)
9954
9956 db = self.db
9957 tablename = db._adapter.get_table(self.query)
9958 return db._adapter._delete(tablename,self.query)
9959
9960 - def _update(self, **update_fields):
9965
    def as_dict(self, flat=False, sanitize=True):
        """
        Serialize this Set (query plus db identity) as a dict.
        With sanitize=True the db identifiers are withheld.
        """
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                # NOTE(review): 'uri' receives db._dbname and 'dbname'
                # receives str(db) -- looks swapped; confirm against DAL.__str__
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else: return self.__dict__
9978
9979 - def as_xml(self, sanitize=True):
9986
9987 - def as_json(self, sanitize=True):
9994
    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        # remember the raw dict, then delegate to the recursive builder
        self.dquery = dquery
        return self.build(self.dquery)
9999
10001 "Experimental: see .parse()"
10002 op, first, second = (d["op"], d["first"],
10003 d.get("second", None))
10004 left = right = built = None
10005
10006 if op in ("AND", "OR"):
10007 if not (type(first), type(second)) == (dict, dict):
10008 raise SyntaxError("Invalid AND/OR query")
10009 if op == "AND":
10010 built = self.build(first) & self.build(second)
10011 else: built = self.build(first) | self.build(second)
10012
10013 elif op == "NOT":
10014 if first is None:
10015 raise SyntaxError("Invalid NOT query")
10016 built = ~self.build(first)
10017 else:
10018
10019 for k, v in {"left": first, "right": second}.items():
10020 if isinstance(v, dict) and v.get("op"):
10021 v = self.build(v)
10022 if isinstance(v, dict) and ("tablename" in v):
10023 v = self.db[v["tablename"]][v["fieldname"]]
10024 if k == "left": left = v
10025 else: right = v
10026
10027 if hasattr(self.db._adapter, op):
10028 opm = getattr(self.db._adapter, op)
10029
10030 if op == "EQ": built = left == right
10031 elif op == "NE": built = left != right
10032 elif op == "GT": built = left > right
10033 elif op == "GE": built = left >= right
10034 elif op == "LT": built = left < right
10035 elif op == "LE": built = left <= right
10036 elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
10037 built = Expression(self.db, opm)
10038 elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
10039 "COALESCE_ZERO", "RAW", "INVERT"):
10040 built = Expression(self.db, opm, left)
10041 elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
10042 "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
10043 "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
10044 "MOD", "AS", "ON", "COMMA", "NOT_NULL",
10045 "COALESCE", "CONTAINS", "BELONGS"):
10046 built = Expression(self.db, opm, left, right)
10047
10048 elif not (left or right): built = Expression(self.db, op)
10049 else:
10050 raise SyntaxError("Operator not supported: %s" % op)
10051
10052 return built
10053
10055 return not self.select(limitby=(0,1), orderby_on_limitby=False)
10056
    def count(self,distinct=None, cache=None):
        """
        Count the records in the Set.

        :param distinct: count distinct values (adapter-dependent)
        :param cache: optional (cache_model, time_expire) pair; the result
            is cached under a key derived from the db uri and the SQL
        """
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            # keep cache keys short: hash anything over 200 chars
            if len(key)>200: key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self,distinct=distinct: \
                  db._adapter.count(self.query,distinct)),
                time_expire)
        return db._adapter.count(self.query,distinct)
10070
10071 - def select(self, *fields, **attributes):
10072 adapter = self.db._adapter
10073 tablenames = adapter.tables(self.query,
10074 attributes.get('join',None),
10075 attributes.get('left',None),
10076 attributes.get('orderby',None),
10077 attributes.get('groupby',None))
10078 fields = adapter.expand_all(fields, tablenames)
10079 return adapter.select(self.query,fields,attributes)
10080
10083
10085 db = self.db
10086 tablename = db._adapter.get_table(self.query)
10087 table = db[tablename]
10088 if any(f(self) for f in table._before_delete): return 0
10089 ret = db._adapter.delete(tablename,self.query)
10090 ret and [f(self) for f in table._after_delete]
10091 return ret
10092
    def update(self, **update_fields):
        """
        Update all records in the Set with the given field values;
        returns the number of updated records (0 if a _before_update
        callback vetoes the operation). Raises SyntaxError when no
        updatable fields remain after listification.
        """
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        # any truthy _before_update callback vetoes the whole update
        if any(f(self,update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields,update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update("%s" % table,self.query,fields)
        # fire _after_update callbacks only when something was updated
        ret and [f(self,update_fields) for f in table._after_update]
        return ret
10106
10108 """
10109 same as update but does not call table._before_update and _after_update
10110 """
10111 tablename = self.db._adapter.get_table(self.query)
10112 table = self.db[tablename]
10113 fields = table._listify(update_fields,update=True)
10114 if not fields: raise SyntaxError("No fields to update")
10115
10116 ret = self.db._adapter.update("%s" % table,self.query,fields)
10117 return ret
10118
10120 tablename = self.db._adapter.get_table(self.query)
10121 response = Row()
10122 response.errors = Row()
10123 new_fields = copy.copy(update_fields)
10124 for key,value in update_fields.iteritems():
10125 value,error = self.db[tablename][key].validate(value)
10126 if error:
10127 response.errors[key] = error
10128 else:
10129 new_fields[key] = value
10130 table = self.db[tablename]
10131 if response.errors:
10132 response.updated = None
10133 else:
10134 if not any(f(self,new_fields) for f in table._before_update):
10135 fields = table._listify(new_fields,update=True)
10136 if not fields: raise SyntaxError("No fields to update")
10137 ret = self.db._adapter.update(tablename,self.query,fields)
10138 ret and [f(self,new_fields) for f in table._after_update]
10139 else:
10140 ret = 0
10141 response.updated = ret
10142 return response
10143
10145 table = self.db[self.db._adapter.tables(self.query)[0]]
10146
10147 if upload_fields:
10148 fields = upload_fields.keys()
10149 else:
10150 fields = table.fields
10151 fields = [f for f in fields if table[f].type == 'upload'
10152 and table[f].uploadfield == True
10153 and table[f].autodelete]
10154 if not fields:
10155 return False
10156 for record in self.select(*[table[f] for f in fields]):
10157 for fieldname in fields:
10158 field = table[fieldname]
10159 oldname = record.get(fieldname, None)
10160 if not oldname:
10161 continue
10162 if upload_fields and oldname == upload_fields[fieldname]:
10163 continue
10164 if field.custom_delete:
10165 field.custom_delete(oldname)
10166 else:
10167 uploadfolder = field.uploadfolder
10168 if not uploadfolder:
10169 uploadfolder = pjoin(
10170 self.db._adapter.folder, '..', 'uploads')
10171 if field.uploadseparate:
10172 items = oldname.split('.')
10173 uploadfolder = pjoin(
10174 uploadfolder,
10175 "%s.%s" % (items[0], items[1]),
10176 items[2][:2])
10177 oldpath = pjoin(uploadfolder, oldname)
10178 if exists(oldpath):
10179 os.unlink(oldpath)
10180 return False
10181
    def __init__(self, colset, table, id):
        # capture the row's column set and its table/record identity
        # (the db handle is taken from the table, not passed separately)
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id
10186
10188 colset, db, tablename, id = self.colset, self.db, self.tablename, self.id
10189 table = db[tablename]
10190 newfields = fields or dict(colset)
10191 for fieldname in newfields.keys():
10192 if not fieldname in table.fields or table[fieldname].type=='id':
10193 del newfields[fieldname]
10194 table._db(table._id==id,ignore_common_filters=True).update(**newfields)
10195 colset.update(newfields)
10196 return colset
10197
10200 self.db, self.tablename, self.id = table._db, table._tablename, id
10202 return self.db(self.db[self.tablename]._id==self.id).delete()
10203
10206 self.db, self.tablename, self.id = table._db, table._tablename, id
10208 if self.db._lazy_tables is False:
10209 raise AttributeError()
10210 table = self.db[self.tablename]
10211 other_table = self.db[other_tablename]
10212 for rfield in table._referenced_by:
10213 if rfield.table == other_table:
10214 return LazySet(rfield, self.id)
10215
10216 raise AttributeError()
10217
10220 self.db, self.tablename, self.fieldname, self.id = \
10221 field.db, field._tablename, field.name, id
10223 query = self.db[self.tablename][self.fieldname]==self.id
10224 return Set(self.db,query)
    def __call__(self, query, ignore_common_filters=False):
        # materialize the underlying Set lazily, then refine it
        return self._getset()(query, ignore_common_filters)
10229 - def _count(self,distinct=None):
10231 - def _select(self, *fields, **attributes):
10235 - def _update(self, **update_fields):
10239 - def count(self,distinct=None, cache=None):
10241 - def select(self, *fields, **attributes):
10247 - def update(self, **update_fields):
10255
10258 self.method=method
10259 self.row=row
10261 return self.method(self.row,*args,**kwargs)
10262
10264 f.__lazy__ = True
10265 return f
10266
10267 -class Rows(object):
10268
10269 """
10270 A wrapper for the return value of a select. It basically represents a table.
10271 It has an iterator and each row is represented as a dictionary.
10272 """
10273
10274
10275
10276 - def __init__(
10277 self,
10278 db=None,
10279 records=[],
10280 colnames=[],
10281 compact=True,
10282 rawrows=None
10283 ):
10284 self.db = db
10285 self.records = records
10286 self.colnames = colnames
10287 self.compact = compact
10288 self.response = rawrows
10289
10291 return '<Rows (%s)>' % len(self.records)
10292
10294 """
10295 db.define_table('x',Field('number','integer'))
10296 if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
10297
10298 from gluon.dal import lazy_virtualfield
10299
10300 class MyVirtualFields(object):
10301 # normal virtual field (backward compatible, discouraged)
10302 def normal_shift(self): return self.x.number+1
10303 # lazy virtual field (because of @staticmethod)
10304 @lazy_virtualfield
10305 def lazy_shift(instance,row,delta=4): return row.x.number+delta
10306 db.x.virtualfields.append(MyVirtualFields())
10307
10308 for row in db(db.x).select():
10309 print row.number, row.normal_shift, row.lazy_shift(delta=7)
10310 """
10311 if not keyed_virtualfields:
10312 return self
10313 for row in self.records:
10314 for (tablename,virtualfields) in keyed_virtualfields.iteritems():
10315 attributes = dir(virtualfields)
10316 if not tablename in row:
10317 box = row[tablename] = Row()
10318 else:
10319 box = row[tablename]
10320 updated = False
10321 for attribute in attributes:
10322 if attribute[0] != '_':
10323 method = getattr(virtualfields,attribute)
10324 if hasattr(method,'__lazy__'):
10325 box[attribute]=VirtualCommand(method,row)
10326 elif type(method)==types.MethodType:
10327 if not updated:
10328 virtualfields.__dict__.update(row)
10329 updated = True
10330 box[attribute]=method()
10331 return self
10332
10334 if self.colnames!=other.colnames:
10335 raise Exception('Cannot & incompatible Rows objects')
10336 records = self.records+other.records
10337 return Rows(self.db,records,self.colnames)
10338
10340 if self.colnames!=other.colnames:
10341 raise Exception('Cannot | incompatible Rows objects')
10342 records = self.records
10343 records += [record for record in other.records \
10344 if not record in records]
10345 return Rows(self.db,records,self.colnames)
10346
10348 if len(self.records):
10349 return 1
10350 return 0
10351
10353 return len(self.records)
10354
10356 return Rows(self.db,self.records[a:b],self.colnames,compact=self.compact)
10357
10359 row = self.records[i]
10360 keys = row.keys()
10361 if self.compact and len(keys) == 1 and keys[0] != '_extra':
10362 return row[row.keys()[0]]
10363 return row
10364
10366 """
10367 iterator over records
10368 """
10369
10370 for i in xrange(len(self)):
10371 yield self[i]
10372
10374 """
10375 serializes the table into a csv file
10376 """
10377
10378 s = StringIO.StringIO()
10379 self.export_to_csv_file(s)
10380 return s.getvalue()
10381
10383 if not self.records:
10384 return None
10385 return self[0]
10386
10388 if not self.records:
10389 return None
10390 return self[-1]
10391
10392 - def find(self,f,limitby=None):
10393 """
10394 returns a new Rows object, a subset of the original object,
10395 filtered by the function f
10396 """
10397 if not self:
10398 return Rows(self.db, [], self.colnames)
10399 records = []
10400 if limitby:
10401 a,b = limitby
10402 else:
10403 a,b = 0,len(self)
10404 k = 0
10405 for row in self:
10406 if f(row):
10407 if a<=k: records.append(row)
10408 k += 1
10409 if k==b: break
10410 return Rows(self.db, records, self.colnames)
10411
10413 """
10414 removes elements from the calling Rows object, filtered by the function f,
10415 and returns a new Rows object containing the removed elements
10416 """
10417 if not self.records:
10418 return Rows(self.db, [], self.colnames)
10419 removed = []
10420 i=0
10421 while i<len(self):
10422 row = self[i]
10423 if f(row):
10424 removed.append(self.records[i])
10425 del self.records[i]
10426 else:
10427 i += 1
10428 return Rows(self.db, removed, self.colnames)
10429
10430 - def sort(self, f, reverse=False):
10431 """
10432 returns a list of sorted elements (not sorted in place)
10433 """
10434 rows = Rows(self.db,[],self.colnames,compact=False)
10435 rows.records = sorted(self,key=f,reverse=reverse)
10436 return rows
10437
10439 """
10440 regroups the rows, by one of the fields
10441 """
10442 one_result = False
10443 if 'one_result' in args:
10444 one_result = args['one_result']
10445
10446 def build_fields_struct(row, fields, num, groups):
10447 ''' helper function:
10448 '''
10449 if num > len(fields)-1:
10450 if one_result:
10451 return row
10452 else:
10453 return [row]
10454
10455 key = fields[num]
10456 value = row[key]
10457
10458 if value not in groups:
10459 groups[value] = build_fields_struct(row, fields, num+1, {})
10460 else:
10461 struct = build_fields_struct(row, fields, num+1, groups[ value ])
10462
10463
10464 if type(struct) == type(dict()):
10465 groups[value].update()
10466
10467 elif type(struct) == type(list()):
10468 groups[value] += struct
10469
10470 else:
10471 groups[value] = struct
10472
10473 return groups
10474
10475 if len(fields) == 0:
10476 return self
10477
10478
10479 if not self.records:
10480 return {}
10481
10482 grouped_row_group = dict()
10483
10484
10485 for row in self:
10486 build_fields_struct(row, fields, 0, grouped_row_group)
10487
10488 return grouped_row_group
10489
10490 - def render(self, i=None, fields=None):
10491 """
10492 Takes an index and returns a copy of the indexed row with values
10493 transformed via the "represent" attributes of the associated fields.
10494
10495 If no index is specified, a generator is returned for iteration
10496 over all the rows.
10497
10498 fields -- a list of fields to transform (if None, all fields with
10499 "represent" attributes will be transformed).
10500 """
10501
10502
10503 if i is None:
10504 return (self.repr(i, fields=fields) for i in range(len(self)))
10505 import sqlhtml
10506 row = copy.deepcopy(self.records[i])
10507 keys = row.keys()
10508 tables = [f.tablename for f in fields] if fields \
10509 else [k for k in keys if k != '_extra']
10510 for table in tables:
10511 repr_fields = [f.name for f in fields if f.tablename == table] \
10512 if fields else [k for k in row[table].keys()
10513 if (hasattr(self.db[table], k) and
10514 isinstance(self.db[table][k], Field)
10515 and self.db[table][k].represent)]
10516 for field in repr_fields:
10517 row[table][field] = sqlhtml.represent(
10518 self.db[table][field], row[table][field], row[table])
10519 if self.compact and len(keys) == 1 and keys[0] != '_extra':
10520 return row[keys[0]]
10521 return row
10522
10523 - def as_list(self,
10524 compact=True,
10525 storage_to_dict=True,
10526 datetime_to_str=False,
10527 custom_types=None):
10528 """
10529 returns the data as a list or dictionary.
10530 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
10531 :param datetime_to_str: convert datetime fields as strings (default False)
10532 """
10533 (oc, self.compact) = (self.compact, compact)
10534 if storage_to_dict:
10535 items = [item.as_dict(datetime_to_str, custom_types) for item in self]
10536 else:
10537 items = [item for item in self]
10538 self.compact = compact
10539 return items
10540
10541
10542 - def as_dict(self,
10543 key='id',
10544 compact=True,
10545 storage_to_dict=True,
10546 datetime_to_str=False,
10547 custom_types=None):
10548 """
10549 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
10550
10551 :param key: the name of the field to be used as dict key, normally the id
10552 :param compact: ? (default True)
10553 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
10554 :param datetime_to_str: convert datetime fields as strings (default False)
10555 """
10556
10557
10558 multi = False
10559 f = self.first()
10560 if f and isinstance(key, basestring):
10561 multi = any([isinstance(v, f.__class__) for v in f.values()])
10562 if (not "." in key) and multi:
10563
10564 def new_key():
10565 i = 0
10566 while True:
10567 yield i
10568 i += 1
10569 key_generator = new_key()
10570 key = lambda r: key_generator.next()
10571
10572 rows = self.as_list(compact, storage_to_dict, datetime_to_str, custom_types)
10573 if isinstance(key,str) and key.count('.')==1:
10574 (table, field) = key.split('.')
10575 return dict([(r[table][field],r) for r in rows])
10576 elif isinstance(key,str):
10577 return dict([(r[key],r) for r in rows])
10578 else:
10579 return dict([(key(r),r) for r in rows])
10580
10582 """
10583 export data to csv, the first line contains the column names
10584
10585 :param ofile: where the csv must be exported to
10586 :param null: how null values must be represented (default '<NULL>')
10587 :param delimiter: delimiter to separate values (default ',')
10588 :param quotechar: character to use to quote string values (default '"')
10589 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
10590 :param represent: use the fields .represent value (default False)
10591 :param colnames: list of column names to use (default self.colnames)
10592 This will only work when exporting rows objects!!!!
10593 DO NOT use this with db.export_to_csv()
10594 """
10595 delimiter = kwargs.get('delimiter', ',')
10596 quotechar = kwargs.get('quotechar', '"')
10597 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
10598 represent = kwargs.get('represent', False)
10599 writer = csv.writer(ofile, delimiter=delimiter,
10600 quotechar=quotechar, quoting=quoting)
10601 colnames = kwargs.get('colnames', self.colnames)
10602 write_colnames = kwargs.get('write_colnames',True)
10603
10604 if write_colnames:
10605 writer.writerow(colnames)
10606
10607 def none_exception(value):
10608 """
10609 returns a cleaned up value that can be used for csv export:
10610 - unicode text is encoded as such
10611 - None values are replaced with the given representation (default <NULL>)
10612 """
10613 if value is None:
10614 return null
10615 elif isinstance(value, unicode):
10616 return value.encode('utf8')
10617 elif isinstance(value,Reference):
10618 return long(value)
10619 elif hasattr(value, 'isoformat'):
10620 return value.isoformat()[:19].replace('T', ' ')
10621 elif isinstance(value, (list,tuple)):
10622 return bar_encode(value)
10623 return value
10624
10625 for record in self:
10626 row = []
10627 for col in colnames:
10628 if not REGEX_TABLE_DOT_FIELD.match(col):
10629 row.append(record._extra[col])
10630 else:
10631 (t, f) = col.split('.')
10632 field = self.db[t][f]
10633 if isinstance(record.get(t, None), (Row,dict)):
10634 value = record[t][f]
10635 else:
10636 value = record[f]
10637 if field.type=='blob' and not value is None:
10638 value = base64.b64encode(value)
10639 elif represent and field.represent:
10640 value = field.represent(value)
10641 row.append(none_exception(value))
10642 writer.writerow(row)
10643
10644 - def xml(self,strict=False,row_name='row',rows_name='rows'):
10645 """
10646 serializes the table using sqlhtml.SQLTABLE (if present)
10647 """
10648
10649 if strict:
10650 ncols = len(self.colnames)
10651 return '<%s>\n%s\n</%s>' % (rows_name,
10652 '\n'.join(row.as_xml(row_name=row_name,
10653 colnames=self.colnames) for
10654 row in self), rows_name)
10655
10656 import sqlhtml
10657 return sqlhtml.SQLTABLE(self).xml()
10658
10659 - def as_xml(self,row_name='row',rows_name='rows'):
10660 return self.xml(strict=True, row_name=row_name, rows_name=rows_name)
10661
10662 - def as_json(self, mode='object', default=None):
10663 """
10664 serializes the rows to a JSON list or object with objects
10665 mode='object' is not implemented (should return a nested
10666 object structure)
10667 """
10668
10669 items = [record.as_json(mode=mode, default=default,
10670 serialize=False,
10671 colnames=self.colnames) for
10672 record in self]
10673
10674 if have_serializers:
10675 return serializers.json(items,
10676 default=default or
10677 serializers.custom_json)
10678 elif simplejson:
10679 return simplejson.dumps(items)
10680 else:
10681 raise RuntimeError("missing simplejson")
10682
10683
    # Method aliases kept for backward compatibility:
    # rows.as_csv() mirrors str(rows); rows.json() mirrors rows.as_json().
    as_csv = __str__
    json = as_json
10686
10687
10688
10689
10690
10691
def test_all():
    """
    Smoke-test doctest for the DAL: exercises table definition, CRUD,
    joins, aggregates, belongs/nested selects and csv export against the
    connection string in sys.argv[1] (sqlite://test.db by default).

    >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True,notnull=True),\
              Field('jsonf', 'json'),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

   Insert a field

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                       uploadf=None, integerf=5, doublef=3.14,\
                       jsonf={"j": True},\
                       datef=datetime.date(2001, 1, 1),\
                       timef=datetime.time(12, 30, 15),\
                       datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth','date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name='Marco',birth='2005-06-22')
    >>> person_id = db.person.insert(name='Massimo',birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'
    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name="Max")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,1,1))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of one 2 many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth','date'),\
              Field('owner',db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of many 2 many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """
10895
10896
10897
10898
# Legacy aliases: earlier web2py releases (the old gluon.sql module) exposed
# these SQL*/GQLDB names; they are kept so existing application code that
# imports them continues to work.
SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
# Attach Field/Table to the DAL class so `from dal import DAL` is sufficient.
DAL.Field = Field
DAL.Table = Table
10910
10911
10912
10913
10914
def geoPoint(x, y):
    """Return the WKT (Well-Known Text) representation of a 2D point."""
    coords = "%f %f" % (x, y)
    return "POINT (%s)" % coords
10917
10919 return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)
10920
10922 return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)
10923
10924
10925
10926
10927
if __name__ == '__main__':
    # When executed as a script, run the doctests embedded in this module
    # (see test_all above).
    import doctest
    doctest.testmod()
10931